diff --git a/.clang-format b/.clang-format new file mode 100644 index 000000000000..2f14c8575147 --- /dev/null +++ b/.clang-format @@ -0,0 +1,2 @@ +# Allow unlimited column length, rather than 80. This prevents word-wrapping comments, which end up in Swagger. +ColumnLimit: 0 \ No newline at end of file diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 000000000000..d943c48cf8bf --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,41 @@ +# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.234.0/containers/debian/.devcontainer/base.Dockerfile + +ARG VARIANT="bullseye" +FROM --platform=linux/amd64 mcr.microsoft.com/vscode/devcontainers/base:0-${VARIANT} + +ARG NEW_USERNAME="" +ARG HOME="" +ARG OLD_USERNAME="vscode" +ARG VSCODE_SCRIPT_VERSION="" + +ARG PROTOC_ZIP="protoc-3.14.0-linux-x86_64.zip" +RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ + && apt-get install -y apt-transport-https ca-certificates git sudo \ + # use new user instead of vscode user + && usermod -l $NEW_USERNAME -d /home/$NEW_USERNAME -m $OLD_USERNAME \ + && groupmod -n $NEW_USERNAME $OLD_USERNAME \ + && echo $NEW_USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$NEW_USERNAME \ + && chmod 0440 /etc/sudoers.d/$NEW_USERNAME \ + # kubectl + && curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packages.cloud.google.com/apt/doc/apt-key.gpg \ + && echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | sudo tee /etc/apt/sources.list.d/kubernetes.list \ + && apt-get update \ + && apt-get install -y kubectl \ + # protobuf + && curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v3.14.0/${PROTOC_ZIP} \ + && unzip -o ${PROTOC_ZIP} -d /usr/local bin/protoc \ + && unzip -o ${PROTOC_ZIP} -d /usr/local 'include/*' \ + && rm -f ${PROTOC_ZIP} \ + && chmod 755 /usr/local/bin/protoc \ + && chmod 
-R 755 /usr/local/include/ \ + # k3d + && wget -q -O - https://raw.githubusercontent.com/k3d-io/k3d/main/install.sh | bash \ + # go + && bash -ec "$(curl -fsSL "https://raw.githubusercontent.com/microsoft/vscode-dev-containers/$VSCODE_SCRIPT_VERSION/script-library/go-debian.sh")" -- "1.18" "/usr/local/go" "$HOME/go" "automatic" "true" "false" \ + # dind + && bash -ec "$(curl -fsSL "https://raw.githubusercontent.com/microsoft/vscode-dev-containers/$VSCODE_SCRIPT_VERSION/script-library/docker-in-docker-debian.sh")" -- "true" "automatic" "true" "20.10" "v1" \ + # node + && bash -ec "$(curl -fsSL "https://raw.githubusercontent.com/microsoft/vscode-dev-containers/$VSCODE_SCRIPT_VERSION/script-library/node-debian.sh")" -- "/usr/local/share/nvm" "16" "automatic" "true" "true" \ + # python + && bash -ec "$(curl -fsSL "https://raw.githubusercontent.com/microsoft/vscode-dev-containers/$VSCODE_SCRIPT_VERSION/script-library/python-debian.sh")" -- "3.9" \ + && apt-get clean -y && rm -rf /var/lib/apt/lists/* diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 000000000000..49c7e50fd431 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,30 @@ +// For format details, see https://aka.ms/devcontainer.json. 
For config options, see the README at: +// https://github.com/microsoft/vscode-dev-containers/tree/v0.234.0/containers/debian +{ + "name": "Debian", + "build": { + "dockerfile": "Dockerfile", + "args": { + "VARIANT": "bullseye", + "NEW_USERNAME": "${localEnv:USER}", + "HOME": "${localEnv:HOME}", + "VSCODE_SCRIPT_VERSION": "v0.234.0" + } + }, + + "settings": {}, + + "extensions": [], + + "mounts": [ "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" ], + + "runArgs": [ "--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined" ], + + "remoteUser": "${localEnv:USER}", + "features": {}, + + "workspaceMount": "source=${localWorkspaceFolder},target=${localEnv:HOME}/go/src/github.com/argoproj/argo-workflows,type=bind", + "workspaceFolder": "${localEnv:HOME}/go/src/github.com/argoproj/argo-workflows", + + "postCreateCommand": "bash -i .devcontainer/startup.sh" +} diff --git a/.devcontainer/startup.sh b/.devcontainer/startup.sh new file mode 100644 index 000000000000..2e3c1f39c4b6 --- /dev/null +++ b/.devcontainer/startup.sh @@ -0,0 +1,18 @@ +#!/bin/bash +set -e + +sudo apt update +sudo chown $USER:docker /var/run/docker.sock +sudo chown -fR $USER:golang $GOPATH + +echo $'127.0.0.1 dex\n127.0.0.1 minio\n127.0.0.1 postgres\n127.0.0.1 mysql\n127.0.0.1 azurite' | sudo tee -a /etc/hosts + +if k3d cluster list | grep k3s-default; +then + echo "skip k3s creation, k3s-default cluster already exist" +else + k3d cluster create +fi + +until k3d cluster start --wait ; do sleep 5 ; done +k3d kubeconfig merge k3s-default --kubeconfig-merge-default --kubeconfig-switch-context diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 4c88b912f518..000000000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,48 +0,0 @@ ---- -name: Reproducible bug report -about: Create a reproducible bug report. Not for support requests. 
-labels: ['bug', 'triage'] ---- - - - -## Checklist - - - -* [ ] Double-checked my configuration. -* [ ] Tested using the latest version. -* [ ] Used the Emissary executor. - -## Summary - -What happened/what you expected to happen? - -What version are you running? - -## Diagnostics - -Paste the smallest workflow that reproduces the bug. We must be able to run the workflow. - -```yaml - -``` - -```bash -# Logs from the workflow controller: -kubectl logs -n argo deploy/workflow-controller | grep ${workflow} - -# If the workflow's pods have not been created, you can skip the rest of the diagnostics. - -# The workflow's pods that are problematic: -kubectl get pod -o yaml -l workflows.argoproj.io/workflow=${workflow},workflow.argoproj.io/phase!=Succeeded - -# Logs from in your workflow's wait container, something like: -kubectl logs -c wait -l workflows.argoproj.io/workflow=${workflow},workflow.argoproj.io/phase!=Succeeded -``` - ---- - -**Message from the maintainers**: - -Impacted by this bug? Give it a 👍. We prioritise the issues with the most 👍. diff --git a/.github/ISSUE_TEMPLATE/bug_report.yaml b/.github/ISSUE_TEMPLATE/bug_report.yaml new file mode 100644 index 000000000000..4e6e657c7c0e --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yaml @@ -0,0 +1,48 @@ +name: Reproducible bug report +description: Create a reproducible bug report. Not for support requests. +labels: [ bug ] +body: + - type: checkboxes + id: terms + attributes: + label: Pre-requisites + options: + - label: I have double-checked my configuration + required: true + - label: I can confirm the issues exists when I tested with `:latest` + required: true + - label: I'd like to contribute the fix myself (see [contributing guide](https://github.com/argoproj/argo-workflows/blob/master/docs/CONTRIBUTING.md)) + - type: textarea + id: description + attributes: + label: What happened/what you expected to happen? 
+ validations: + required: true + - type: input + id: version + attributes: + label: Version + placeholder: e.g. v3.3.8 or latest + validations: + required: true + - type: textarea + id: failing-workflow + attributes: + label: Paste a small workflow that reproduces the issue. We must be able to run the workflow; don't enter a workflows that uses private images. + render: YAML + validations: + required: true + - type: textarea + id: controller-logs + attributes: + label: Logs from the workflow controller + value: kubectl logs -n argo deploy/workflow-controller | grep ${workflow} + validations: + required: true + - type: textarea + id: wait-logs + attributes: + label: Logs from in your workflow's wait container + value: kubectl logs -c wait -l workflows.argoproj.io/workflow=${workflow},workflow.argoproj.io/phase!=Succeeded + validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 259837a4a2f9..a8865c0269f1 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -10,6 +10,3 @@ contact_links: - name: Chat on Slack url: https://argoproj.github.io/community/join-slack about: Maybe chatting with the community can help - - name: 30m to talk anything Argo - url: https://bit.ly/book-30m-with-argo-team - about: Sign-up for 30m with the core Argo engineers diff --git a/.github/ISSUE_TEMPLATE/regression.md b/.github/ISSUE_TEMPLATE/regression.md deleted file mode 100644 index 18691b095482..000000000000 --- a/.github/ISSUE_TEMPLATE/regression.md +++ /dev/null @@ -1,46 +0,0 @@ ---- -name: Regression report -about: Create a regression report. Not for support requests. -labels: ['bug', 'regression', 'triage'] ---- -## Checklist - - - -* [ ] Double-checked my configuration. -* [ ] Tested using the latest version. -* [ ] Used the Emissary executor. - -## Summary - -What happened/what you expected to happen? - -What version are you running? 
- - -## Diagnostics - -Paste the smallest workflow that reproduces the bug. We must be able to run the workflow. - -```yaml - -``` - -```bash -# Logs from the workflow controller: -kubectl logs -n argo deploy/workflow-controller | grep ${workflow} - -# If the workflow's pods have not been created, you can skip the rest of the diagnostics. - -# The workflow's pods that are problematic: -kubectl get pod -o yaml -l workflows.argoproj.io/workflow=${workflow},workflow.argoproj.io/phase!=Succeeded - -# Logs from in your workflow's wait container, something like: -kubectl logs -c wait -l workflows.argoproj.io/workflow=${workflow},workflow.argoproj.io/phase!=Succeeded -``` - ---- - -**Message from the maintainers**: - -Impacted by this regression? Give it a 👍. We prioritise the issues with the most 👍. diff --git a/.github/ISSUE_TEMPLATE/regression.yaml b/.github/ISSUE_TEMPLATE/regression.yaml new file mode 100644 index 000000000000..cd14eef0dd2d --- /dev/null +++ b/.github/ISSUE_TEMPLATE/regression.yaml @@ -0,0 +1,49 @@ +name: Regression report +description: Create a regression report. Not for support requests. +labels: [ bug, regression ] + +body: + - type: checkboxes + id: terms + attributes: + label: Pre-requisites + options: + - label: I have double-checked my configuration + required: true + - label: I can confirm the issues exists when I tested with `:latest` + required: true + - label: I'd like to contribute the fix myself (see [contributing guide](https://github.com/argoproj/argo-workflows/blob/master/docs/CONTRIBUTING.md)) + - type: textarea + id: description + attributes: + label: What happened/what you expected to happen? + validations: + required: true + - type: input + id: version + attributes: + label: Version + placeholder: e.g. v3.3.8 or latest + validations: + required: true + - type: textarea + id: failing-workflow + attributes: + label: Paste a small workflow that reproduces the issue. 
We must be able to run the workflow; don't enter a workflows that uses private images. + render: YAML + validations: + required: true + - type: textarea + id: controller-logs + attributes: + label: Logs from the workflow controller + value: kubectl logs -n argo deploy/workflow-controller | grep ${workflow} + validations: + required: true + - type: textarea + id: wait-logs + attributes: + label: Logs from in your workflow's wait container + value: kubectl logs -c wait -l workflows.argoproj.io/workflow=${workflow},workflow.argoproj.io/phase!=Succeeded + validations: + required: true diff --git a/.github/dependabot.yml b/.github/dependabot.yml index af8fd7f28499..caa13f04034f 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -3,14 +3,26 @@ updates: - package-ecosystem: "gomod" directory: "/" schedule: - interval: "daily" + interval: "weekly" + day: "saturday" ignore: - dependency-name: k8s.io/* - dependency-name: github.com/grpc-ecosystem/* - dependency-name: google.golang.org/grpc - open-pull-requests-limit: 2 + open-pull-requests-limit: 10 - package-ecosystem: "github-actions" directory: "/" schedule: - interval: "daily" + interval: "weekly" + day: "saturday" + + - package-ecosystem: "npm" + directory: "/ui" + schedule: + interval: "weekly" + day: "saturday" + ignore: + - dependency-name: raw-loader + - dependency-name: style-loader + open-pull-requests-limit: 10 diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 18c987fb2a65..d6f5b1b9b7b9 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -16,7 +16,7 @@ If you did not do this, reset all your commit and replace them with a single com ``` git reset HEAD~1 ;# change 1 to how many commits you made -git commit --sign-off -m 'feat: my feat. Fixes #1234' +git commit --signoff -m 'feat: my feat. 
Fixes #1234' ``` When creating your PR: diff --git a/.github/stale.yml b/.github/stale.yml index 9b0df8afb1a9..069f95dd7e5f 100644 --- a/.github/stale.yml +++ b/.github/stale.yml @@ -1,12 +1,11 @@ # https://probot.github.io/apps/stale/ # Number of days of inactivity before an issue becomes stale -daysUntilStale: 7 +daysUntilStale: 14 # Number of days of inactivity before a stale issue is closed -daysUntilClose: 3 +daysUntilClose: 7 # Issues with these labels will never be considered stale exemptLabels: - enhancement - - mentoring - pinned - security - tech-debt @@ -15,7 +14,9 @@ staleLabel: stale # Comment to post when marking an issue as stale. Set to `false` to disable markComment: > This issue has been automatically marked as stale because it has not had - recent activity. It will be closed if no further activity occurs. Thank you - for your contributions. + recent activity. It will be closed if no further activity occurs. If this is a mentoring request, + please provide an update here. Thank you for your contributions. # Comment to post when closing a stale issue. Set to `false` to disable -closeComment: false \ No newline at end of file +closeComment: > + This issue has been closed due to inactivity. Feel free to re-open if you + still encounter this issue. 
diff --git a/.github/workflows/changelog.yaml b/.github/workflows/changelog.yaml index 0f3a8e4df657..e9eea357dbe1 100644 --- a/.github/workflows/changelog.yaml +++ b/.github/workflows/changelog.yaml @@ -5,8 +5,14 @@ on: tags: - v* - "!v0.0.0" +permissions: + contents: read + jobs: generate_changelog: + permissions: + contents: write # for peter-evans/create-pull-request to create branch + pull-requests: write # for peter-evans/create-pull-request to create a PR if: github.repository == 'argoproj/argo-workflows' runs-on: ubuntu-latest name: Generate changelog diff --git a/.github/workflows/ci-build.yaml b/.github/workflows/ci-build.yaml index e046e7af5c93..f308bf335cd1 100644 --- a/.github/workflows/ci-build.yaml +++ b/.github/workflows/ci-build.yaml @@ -13,6 +13,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: tests: name: Unit Tests @@ -20,9 +23,9 @@ jobs: timeout-minutes: 8 steps: - uses: actions/checkout@v3 - - uses: actions/setup-go@v2 + - uses: actions/setup-go@v3 with: - go-version: "1.17" + go-version: "1.18" # https://github.com/actions/cache/blob/main/examples.md#go---modules - uses: actions/cache@v3 with: @@ -32,7 +35,7 @@ jobs: key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - - run: make test STATIC_FILES=false GOTEST='go test -covermode=atomic -coverprofile=coverage.out' + - run: make test STATIC_FILES=false GOTEST='go test -p 20 -covermode=atomic -coverprofile=coverage.out' # engineers just ignore this in PRs, so lets not even run it - run: bash <(curl -s https://codecov.io/bash) if: github.ref == 'refs/heads/master' @@ -44,7 +47,7 @@ jobs: # needs: [ lint ] steps: - uses: actions/checkout@v3 - - uses: docker/setup-buildx-action@v1 + - uses: docker/setup-buildx-action@v2 - name: Cache Docker layers uses: actions/cache@v3 with: @@ -62,7 +65,7 @@ jobs: --output=type=docker \ . 
- run: docker save quay.io/argoproj/argoexec:latest > /tmp/argoexec_image.tar - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v3 with: name: argoexec path: /tmp/argoexec_image.tar @@ -78,50 +81,46 @@ jobs: e2e-tests: name: E2E Tests runs-on: ubuntu-latest - timeout-minutes: 20 + timeout-minutes: 25 needs: [ tests, argoexec-image ] env: KUBECONFIG: /home/runner/.kubeconfig strategy: fail-fast: false - max-parallel: 4 matrix: include: - - test: test-plugins - containerRuntimeExecutor: emissary - profile: plugins + - test: test-executor + profile: minimal + - test: test-corefunctional + profile: minimal - test: test-functional - containerRuntimeExecutor: emissary profile: minimal - test: test-api - containerRuntimeExecutor: emissary profile: mysql - test: test-cli - containerRuntimeExecutor: emissary profile: mysql - test: test-cron - containerRuntimeExecutor: emissary profile: minimal - test: test-examples - containerRuntimeExecutor: emissary profile: minimal - - test: test-executor - containerRuntimeExecutor: emissary - profile: minimal - - test: test-executor - containerRuntimeExecutor: docker - profile: minimal - - test: test-executor - containerRuntimeExecutor: kubelet + - test: test-plugins + profile: plugins + - test: test-java-sdk profile: minimal - - test: test-executor - containerRuntimeExecutor: pns + - test: test-python-sdk profile: minimal steps: - uses: actions/checkout@v3 - - uses: actions/setup-go@v2 + - uses: actions/setup-go@v3 + with: + go-version: "1.18" + - uses: actions/setup-java@v3 with: - go-version: "1.17" + java-version: '8' + distribution: adopt + - uses: actions/setup-python@v4 + with: + python-version: '3.x' - uses: actions/cache@v3 with: path: | @@ -139,7 +138,7 @@ jobs: echo " user:" >> $KUBECONFIG echo " token: xxxxxx" >> $KUBECONFIG until kubectl cluster-info ; do sleep 10s ; done - - uses: actions/download-artifact@v2 + - uses: actions/download-artifact@v3 with: name: argoexec path: /tmp @@ -150,14 +149,21 @@ 
jobs: echo '127.0.0.1 minio' | sudo tee -a /etc/hosts echo '127.0.0.1 postgres' | sudo tee -a /etc/hosts echo '127.0.0.1 mysql' | sudo tee -a /etc/hosts - - run: make install PROFILE=${{matrix.profile}} E2E_EXECUTOR=${{matrix.containerRuntimeExecutor}} STATIC_FILES=false + echo '127.0.0.1 azurite' | sudo tee -a /etc/hosts + - run: make install PROFILE=${{matrix.profile}} STATIC_FILES=false - run: make controller $(go env GOPATH)/bin/goreman STATIC_FILES=false - run: make cli STATIC_FILES=false - if: ${{matrix.test == 'test-api' || matrix.test == 'test-cli'}} - - run: make start PROFILE=${{matrix.profile}} E2E_EXECUTOR=${{matrix.containerRuntimeExecutor}} AUTH_MODE=client STATIC_FILES=false LOG_LEVEL=info API=${{matrix.test == 'test-api' || matrix.test == 'test-cli'}} UI=false > /tmp/argo.log 2>&1 & + if: ${{matrix.test == 'test-api' || matrix.test == 'test-cli' || matrix.test == 'test-java-sdk' || matrix.test == 'test-python-sdk'}} + - run: make start PROFILE=${{matrix.profile}} AUTH_MODE=client STATIC_FILES=false LOG_LEVEL=info API=${{matrix.test == 'test-api' || matrix.test == 'test-cli' || matrix.test == 'test-java-sdk' || matrix.test == 'test-python-sdk'}} UI=false AZURE=true > /tmp/argo.log 2>&1 & - run: make wait timeout-minutes: 4 - - run: make ${{matrix.test}} E2E_TIMEOUT=1m STATIC_FILES=false + - name: make ${{matrix.test}} + # https://github.com/marketplace/actions/retry-step + uses: nick-fields/retry@v2.8.1 + with: + timeout_minutes: 20 + max_attempts: 2 + command: make ${{matrix.test}} E2E_SUITE_TIMEOUT=20m STATIC_FILES=false AZURE=true - if: ${{ failure() }} run: | [ -e /tmp/argo.log ] && cat /tmp/argo.log @@ -171,9 +177,9 @@ jobs: GOPATH: /home/runner/go steps: - uses: actions/checkout@v3 - - uses: actions/setup-go@v2 + - uses: actions/setup-go@v3 with: - go-version: "1.17" + go-version: "1.18" - uses: actions/cache@v3 with: path: | @@ -209,12 +215,18 @@ jobs: GOPATH: /home/runner/go steps: - uses: actions/checkout@v3 + - uses: 
actions/setup-go@v3 with: - fetch-depth: 0 - - run: cp server/static/files.go.stub server/static/files.go - - uses: golangci/golangci-lint-action@v2 + go-version: "1.18" + - uses: actions/cache@v3 with: - version: v1.42.0 + path: | + ~/.cache/go-build + ~/go/pkg/mod + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + restore-keys: | + ${{ runner.os }}-go- + - run: make lint STATIC_FILES=false - run: git diff --exit-code ui: @@ -237,3 +249,7 @@ jobs: - run: yarn --cwd ui test - run: yarn --cwd ui lint - run: git diff --exit-code + # check to see if it'll start (but not if it'll render) + - run: yarn --cwd ui start & + - run: until curl http://localhost:8080 > /dev/null ; do sleep 10s ; done + timeout-minutes: 1 diff --git a/.github/workflows/dependabot-reviewer.yml b/.github/workflows/dependabot-reviewer.yml new file mode 100644 index 000000000000..8110f0e7a704 --- /dev/null +++ b/.github/workflows/dependabot-reviewer.yml @@ -0,0 +1,28 @@ +# https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions +name: Approve and enable auto-merge for dependabot +on: pull_request + +permissions: + pull-requests: write + contents: write + +jobs: + review: + runs-on: ubuntu-latest + if: ${{ github.actor == 'dependabot[bot]' && github.repository == 'argoproj/argo-workflows'}} + steps: + - name: Dependabot metadata + id: metadata + uses: dependabot/fetch-metadata@v1.3.3 + with: + github-token: "${{ secrets.GITHUB_TOKEN }}" + - name: Approve PR + run: gh pr review --approve "$PR_URL" + env: + PR_URL: ${{github.event.pull_request.html_url}} + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} + - name: Enable auto-merge for Dependabot PRs + run: gh pr merge --auto --squash "$PR_URL" + env: + PR_URL: ${{github.event.pull_request.html_url}} + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} \ No newline at end of file diff --git a/.github/workflows/gh-pages.yaml b/.github/workflows/gh-pages.yaml index 9fab807cba9c..dc08efb64d51 100644 --- 
a/.github/workflows/gh-pages.yaml +++ b/.github/workflows/gh-pages.yaml @@ -1,30 +1,46 @@ -name: Deploy +name: Docs on: push: branches: - master + pull_request: + branches: + - master concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: - deploy: - if: github.repository == 'argoproj/argo-workflows' + docs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: 3.9 - - uses: actions/setup-go@v1 + - uses: actions/setup-go@v3 + with: + go-version: '1.18' + - uses: actions/setup-node@v3 + with: + node-version: "16" + # Use the same make target both locally and on CI to make it easier to debug failures. + - run: make docs + # If markdownlint fixes issues, files will be changed. If so, fail the build. + - run: git diff --exit-code + # Upload the site so reviewers see it. + - uses: actions/upload-artifact@v3 with: - go-version: '1.17' - - run: pip install mkdocs==1.2.3 mkdocs_material==8.1.9 - - run: mkdocs build - - run: make parse-examples + name: docs + path: site + if-no-files-found: error - uses: peaceiris/actions-gh-pages@v2.9.0 + if: github.repository == 'argoproj/argo-workflows' && github.ref == 'refs/heads/master' env: PERSONAL_TOKEN: ${{ secrets.PERSONAL_TOKEN }} PUBLISH_BRANCH: gh-pages diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 3cee95e17b27..cdfeb0846a38 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -16,6 +16,9 @@ defaults: run: shell: bash +permissions: + contents: read + jobs: build-linux-amd64: name: Build & push linux/amd64 @@ -29,7 +32,7 @@ jobs: - uses: actions/checkout@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 + uses: docker/setup-buildx-action@v2 - name: Cache Docker layers uses: actions/cache@v3 @@ -41,13 +44,13 @@ jobs: ${{ runner.os }}-${{ matrix.platform }}-${{ matrix.target 
}}-buildx- - name: Docker Login - uses: docker/login-action@v1 + uses: docker/login-action@v2 with: username: ${{ secrets.DOCKERIO_USERNAME }} password: ${{ secrets.DOCKERIO_PASSWORD }} - name: Docker Login - uses: docker/login-action@v1 + uses: docker/login-action@v2 with: registry: quay.io username: ${{ secrets.QUAYIO_USERNAME }} @@ -95,12 +98,12 @@ jobs: - uses: actions/checkout@v3 - name: Set up QEMU - uses: docker/setup-qemu-action@v1 + uses: docker/setup-qemu-action@v2 with: platforms: arm64 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 + uses: docker/setup-buildx-action@v2 - name: Cache Docker layers uses: actions/cache@v3 @@ -112,13 +115,13 @@ jobs: ${{ runner.os }}-${{ matrix.platform }}-${{ matrix.target }}-buildx- - name: Docker Login - uses: docker/login-action@v1 + uses: docker/login-action@v2 with: username: ${{ secrets.DOCKERIO_USERNAME }} password: ${{ secrets.DOCKERIO_PASSWORD }} - name: Docker Login - uses: docker/login-action@v1 + uses: docker/login-action@v2 with: registry: quay.io username: ${{ secrets.QUAYIO_USERNAME }} @@ -317,6 +320,8 @@ jobs: done publish-release: + permissions: + contents: write # for softprops/action-gh-release to create GitHub release runs-on: ubuntu-latest if: github.repository == 'argoproj/argo-workflows' needs: [ push-images, test-images-linux-amd64, test-images-windows ] @@ -327,9 +332,9 @@ jobs: - uses: actions/setup-node@v3 with: node-version: "16" - - uses: actions/setup-go@v2 + - uses: actions/setup-go@v3 with: - go-version: "1.17" + go-version: "1.18" - uses: actions/cache@v3 with: path: ui/node_modules diff --git a/.github/workflows/sdks.yaml b/.github/workflows/sdks.yaml index cf6036f93b21..2d7f1ab82db4 100644 --- a/.github/workflows/sdks.yaml +++ b/.github/workflows/sdks.yaml @@ -3,8 +3,16 @@ on: push: tags: - v* + +permissions: + contents: read + jobs: sdk: + permissions: + contents: read + packages: write # for publishing packages + contents: write # for creating releases if: 
github.repository == 'argoproj/argo-workflows' runs-on: ubuntu-latest name: Publish SDK diff --git a/.github/workflows/snyk.yml b/.github/workflows/snyk.yml index d986eaa70b4a..903e150218ea 100644 --- a/.github/workflows/snyk.yml +++ b/.github/workflows/snyk.yml @@ -2,6 +2,9 @@ name: Snyk on: schedule: - cron: "30 2 * * *" +permissions: + contents: read + jobs: # we do not scan images here, they're scanned here: https://app.snyk.io/org/argoproj/projects diff --git a/.gitignore b/.gitignore index 4792506f8cb7..012b8b068077 100644 --- a/.gitignore +++ b/.gitignore @@ -40,3 +40,7 @@ git-ask-pass.sh sdks/python/client/dist/* /v3/ /cmd/argoexec/commands/test.txt + +# Do not commit rendered installation manifests since they are misleading to users. +manifests/install.yaml +manifests/namespace-install.yaml diff --git a/.golangci.yml b/.golangci.yml index b2e9b37a1acd..8eb46041a64a 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -1,6 +1,5 @@ # https://golangci-lint.run/usage/quick-start/ run: - concurrency: 4 timeout: 8m skip-dirs: - dist @@ -20,6 +19,7 @@ run: - cron - executor - examples + - corefunctional - functional - plugins linters: @@ -31,7 +31,8 @@ linters: # only minor issues # - errorlint - exportloopref - - gci + # seems to have bugs in recent version, also slow + # - gci - gosec - gosimple - govet diff --git a/.markdownlint.yaml b/.markdownlint.yaml new file mode 100644 index 000000000000..261ef7e65178 --- /dev/null +++ b/.markdownlint.yaml @@ -0,0 +1,5 @@ +# not fix for line length +MD013: false +# mkdocs uses 4 spaces indent +MD007: + indent: 4 diff --git a/.mlc_config.json b/.mlc_config.json new file mode 100644 index 000000000000..946725d2fec4 --- /dev/null +++ b/.mlc_config.json @@ -0,0 +1,11 @@ +{ + "ignorePatterns": [ + { + "pattern": ".*localhost.*" + } + ], + "aliveStatusCodes": [ + 200, + 429 + ] +} diff --git a/.spelling b/.spelling new file mode 100644 index 000000000000..507e9340a9a2 --- /dev/null +++ b/.spelling @@ -0,0 +1,216 @@ +# 
markdown-spellcheck spelling configuration file +# Format - lines beginning # are comments +# global dictionary is at the start, file overrides afterwards +# one word per line, to define a file override use ' - filename' +# where filename is relative to this configuration file +000s +0s +100Mi +100s +10h +10s +120s +120sec +1Gi +1Mi +1h +1m +2Gi +2h +30s +3min +3s +4Gi +4xx +512Mi +5m +5xx +8Ki +90m +Alexandre +Alibaba +Ang +Anthos +ArgoLabs +Artifactory +BlackRock +Breitgand +Couler +DataDog +Dataflow +DeleteObject +DevOps +Dex +EtcD +EventRouter +FailFast +GSoC +GitOps +Github +Golang +Grafana +Grammarly +Hadoop +Heptio +Homebrew +InsideBoard +Invocators +Istio +J.P. +Jemison +JetBrains +KNative +Katacoda +Kerberos +KubectlExec +Kubeflow +Kustomize +Lifecycle-Hook +LitmusChaos +metadata +MLOps +MinIO +Minikube +MySQL +Nagal +Nano +Nginx +Node.JS. +OAuth +OAuth2 +Okta +parameterize +parameterized +parameterizing +PDBs +PProf +PVCs +Peixuan +Ploomber +Postgres +Roadmap +RoleBinding +s3 +SDKs +Sharding +Singer.io +Snyk +Sumit +Tekton +Tianchu +Traefik +TripAdvisor +VSCode +Valasek +Webhooks +Welch +`CronTab` +`OnFailure` +a.m. +alexec +anded +apis +architecting +argo +args +async +auth +backend +blkperl +boolean +booleans +buildkit +config +cpu +cron +daemoned +dev-container +dinever +dropdown +e.g. +e2e +entrypoint +enum +env +errored +expr +fibonacci +finalizer +govaluate +gzipped +i.e. +instantiator +instantiators +jenkins +k3d +k3s +k8s-jobs +kube +kubelet +kubernetes +localhost +memoization +memoized +memoizing +mentee +mentees +minikube +mutex +namespace +namespaces +natively +p.m. +params +pre-commit +rc2 +repo +roadmap +runtime +runtimes +sandboxed +sarabala1979 +simster7 +stateful +stderr +tczhao +terrytangyuan +themself +un-reconciled +untracked +v1 +v1.0 +v1.1 +v1.2 +v1.3 +v2 +v2.10 +v2.11 +v2.12 +v2.23.0 +v2.4 +v2.5 +v2.6 +v2.7 +v2.7.2 +v2.8 +v2.9 +v3.0 +v3.0.0 +v3.1 +v3.1.4 +v3.2 +v3.2. +v3.3 +v3.3. +v3.4 +v3.4. 
+validator +versioning +webHDFS +webhook +webhooks +workflow-controller-configmap +yaml +idempotence +kube-scheduler +kube-apiserver diff --git a/CHANGELOG.md b/CHANGELOG.md index 081daf69e99c..90cf195d0349 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,733 @@ # Changelog +## v3.4.0 (2022-09-18) + + * [047952afd](https://github.com/argoproj/argo-workflows/commit/047952afd539d06cae2fd6ba0b608b19c1194bba) fix: SDK workflow file + * [97328f1ed](https://github.com/argoproj/argo-workflows/commit/97328f1ed3885663b780f43e6b553208ecba4d3c) chore(deps): bump classnames and @types/classnames in /ui (#9603) + * [2dac194a5](https://github.com/argoproj/argo-workflows/commit/2dac194a52acb46c5535e5f552fdf7fd520d0f4e) chore(deps-dev): bump @babel/core from 7.19.0 to 7.19.1 in /ui (#9602) + * [47544cc02](https://github.com/argoproj/argo-workflows/commit/47544cc02a8663b5b69e4c213a382ff156deb63e) feat: Support retrying complex workflows with nested group nodes (#9499) + * [30bd96b4c](https://github.com/argoproj/argo-workflows/commit/30bd96b4c030fb728a3da78e0045982bf778d554) fix: Error message if cronworkflow failed to update (#9583) + * [fc5e11cd3](https://github.com/argoproj/argo-workflows/commit/fc5e11cd37f51e36517f7699c23afabac4f08528) chore(deps-dev): bump webpack-dev-server from 4.10.1 to 4.11.0 in /ui (#9567) + * [ace179804](https://github.com/argoproj/argo-workflows/commit/ace179804996edc0d356bff257a980e60b9bc5a0) docs(dev-container): Fix buildkit doc for local dev (#9580) + +### Contributors + + * JM + * Saravanan Balasubramanian + * Yuan Tang + * dependabot[bot] + +## v3.4.0-rc4 (2022-09-10) + + * [dee4ea5b0](https://github.com/argoproj/argo-workflows/commit/dee4ea5b0be2408e13af7745db910d0130e578f2) chore(deps-dev): bump @babel/core from 7.18.13 to 7.19.0 in /ui (#9566) + * [8172b493d](https://github.com/argoproj/argo-workflows/commit/8172b493d649c20b0b72ae56cf5b69bd2fa5ed8d) chore(deps-dev): bump sass 
from 1.54.8 to 1.54.9 in /ui (#9565) + * [68a793586](https://github.com/argoproj/argo-workflows/commit/68a793586ed8154f71d156e9daa8055e7ea8492e) chore(deps-dev): bump @babel/preset-env from 7.18.10 to 7.19.0 in /ui (#9562) + * [e1d8387fa](https://github.com/argoproj/argo-workflows/commit/e1d8387fa7a9c0648c548e2809f61eb77a802537) chore(deps-dev): bump babel-jest from 29.0.1 to 29.0.2 in /ui (#9564) + * [3950f8c1c](https://github.com/argoproj/argo-workflows/commit/3950f8c1c12ff7451b3e1be96b2ba108025a9677) chore(deps): bump google.golang.org/api from 0.94.0 to 0.95.0 (#9561) + * [8310bdbc9](https://github.com/argoproj/argo-workflows/commit/8310bdbc9d07f87640d944b949e465a044148368) chore(deps): bump github.com/coreos/go-oidc/v3 from 3.3.0 to 3.4.0 (#9560) + * [baaa8d0a9](https://github.com/argoproj/argo-workflows/commit/baaa8d0a9e90f5234ce7d02cbc33f8756a3ad4da) chore(deps): bump github.com/minio/minio-go/v7 from 7.0.35 to 7.0.36 (#9558) + * [aab923452](https://github.com/argoproj/argo-workflows/commit/aab92345267e9e0562ee8495f49ac6d80e06ae28) chore(deps): bump github.com/spf13/viper from 1.12.0 to 1.13.0 (#9559) + * [ec7c210c9](https://github.com/argoproj/argo-workflows/commit/ec7c210c9743d8f85d528d5593bc7390d73ff534) fix: use urlencode instead of htmlencode to sanitize url (#9538) + * [3a3f15997](https://github.com/argoproj/argo-workflows/commit/3a3f1599718453ca79800cfc28f6631ee780911b) fix: enable workflow-aggregate-roles to treat workflowtaskresults. 
Fixes #9545 (#9546) + * [9d66b69f0](https://github.com/argoproj/argo-workflows/commit/9d66b69f0bca92d7ef0c9aa67e87a2e334797530) fix: for pod that's been GC'ed we need to get the log from the artifact (#9540) + * [34a4e48c3](https://github.com/argoproj/argo-workflows/commit/34a4e48c3f412ba89cd0491469d13a14fdaf51b3) chore(deps): bump github.com/minio/minio-go/v7 from 7.0.34 to 7.0.35 (#9502) + * [ef6bd5710](https://github.com/argoproj/argo-workflows/commit/ef6bd5710e5780afe40321f4d384471d9e02197c) fix: Capture exit code of signaled containers. Fixes #9415 (#9523) + * [6e2f15f9e](https://github.com/argoproj/argo-workflows/commit/6e2f15f9eea82f1344f139800869f9e7fd255b04) feat: added support for DAG task name as variables in workflow templates (#9387) + * [f27475feb](https://github.com/argoproj/argo-workflows/commit/f27475feb850dc43e07c3c5215cc9638947f0859) fix: default to 'main' container in Sensor logs. Fixes #9459 (#9438) + * [c00fbf88f](https://github.com/argoproj/argo-workflows/commit/c00fbf88f15104673b05ba5e109a72fed84dd38e) feat: Add node ID to node info panel (#9500) + * [2a80a2c1a](https://github.com/argoproj/argo-workflows/commit/2a80a2c1a9b0a2370f547492ef9168ee583077f5) fix: revert accidental commit in UI logs viewer (#9515) + * [b9d02cfd5](https://github.com/argoproj/argo-workflows/commit/b9d02cfd59c72b2bc8e437e6591ca4a145a3eb9b) chore(deps): bump cloud.google.com/go/storage from 1.25.0 to 1.26.0 (#9506) + * [bd9fc66c5](https://github.com/argoproj/argo-workflows/commit/bd9fc66c52c8e14123e5d7a4a7829023a072da9f) chore(deps-dev): bump @fortawesome/fontawesome-free from 6.1.2 to 6.2.0 in /ui (#9513) + * [9004f5e26](https://github.com/argoproj/argo-workflows/commit/9004f5e263a4ead8a5be4a4a09db03064eb1d453) chore(deps): bump google.golang.org/api from 0.93.0 to 0.94.0 (#9505) + * [605b0a0eb](https://github.com/argoproj/argo-workflows/commit/605b0a0eb3413107e2e87d6f3399d6b5f2778727) 
chore(deps-dev): bump sass from 1.54.5 to 1.54.8 in /ui (#9514) + * [6af53eff3](https://github.com/argoproj/argo-workflows/commit/6af53eff34180d9d238ba0fd0cb5a5b9b57b15a5) chore(deps-dev): bump babel-jest from 28.1.3 to 29.0.1 in /ui (#9512) + * [a2c20d70e](https://github.com/argoproj/argo-workflows/commit/a2c20d70e8885937532055b8c2791799020057ec) chore(deps): bump react-monaco-editor from 0.49.0 to 0.50.1 in /ui (#9509) + * [041d1382d](https://github.com/argoproj/argo-workflows/commit/041d1382d0a22a8bb88e88486f79c6b4bb6dfc8d) chore(deps-dev): bump webpack-dev-server from 4.10.0 to 4.10.1 in /ui (#9510) + * [7f9a15e77](https://github.com/argoproj/argo-workflows/commit/7f9a15e77eaa84d7f5474d28e30e52a77ca76b2e) chore(deps-dev): bump @babel/core from 7.18.10 to 7.18.13 in /ui (#9507) + * [08963c468](https://github.com/argoproj/argo-workflows/commit/08963c4680353a0b4e0abf16f0590a66b8dd4b3e) chore(deps-dev): bump @types/dagre from 0.7.47 to 0.7.48 in /ui (#9508) + * [1b09c8641](https://github.com/argoproj/argo-workflows/commit/1b09c8641ad11680b90dba582b3eae98dcee01c3) chore(deps): bump github.com/coreos/go-oidc/v3 from 3.2.0 to 3.3.0 (#9504) + * [4053ddf08](https://github.com/argoproj/argo-workflows/commit/4053ddf081755df8819a4a33ce558c92235ea81d) chore(deps): bump github.com/aliyun/aliyun-oss-go-sdk from 2.2.4+incompatible to 2.2.5+incompatible (#9503) + * [06d295752](https://github.com/argoproj/argo-workflows/commit/06d29575210d7b61ca7c7f2fb8e28fdd6c3d5637) feat: log format option for main containers (#9468) + +### Contributors + + * Alex Collins + * Julie Vogelman + * Rohan Kumar + * Takao Shibata + * Thomas Bonfort + * Tianchu Zhao + * Yuan Tang + * dependabot[bot] + * jsvk + +## v3.4.0-rc3 (2022-08-31) + + * [b941fbcab](https://github.com/argoproj/argo-workflows/commit/b941fbcaba087d5c5569573d1ef1a027313174ce) feat: improve e2e test for ArtifactGC (#9448) + * 
[94608d1dd](https://github.com/argoproj/argo-workflows/commit/94608d1ddc8781a55563f52ea65476dc99a54f94) feat: added support for artifact GC on GCS (#9420) + * [26ab0aed8](https://github.com/argoproj/argo-workflows/commit/26ab0aed8ba19571ffe3a2b048fcb43cbd1986e3) fix: link to "get artifacts from logs" was assuming Node ID was equal to Pod Name (#9464) + * [9cce91ea0](https://github.com/argoproj/argo-workflows/commit/9cce91ea0ca748cb35bd653c6f401d1aed97e6e8) Update USERS.md (#9471) + * [7118e1224](https://github.com/argoproj/argo-workflows/commit/7118e1224283ecb894794fdd72526089409e1476) feat: support slash in synchronization lock names. Fixes #9394 (#9404) + * [ff4109928](https://github.com/argoproj/argo-workflows/commit/ff4109928bd09a1b1d716cbdf82bd3ca132276d1) fix: Descendants of suspended nodes need to be removed when retrying workflow (#9440) + * [a09172afa](https://github.com/argoproj/argo-workflows/commit/a09172afafdb98ab362058618b5dc61980f0254e) fix: Incorrect alignment for archived workflow. Fixes #9433 (#9439) + * [04d19435c](https://github.com/argoproj/argo-workflows/commit/04d19435cb07e8815f1f95cca6751f8ce6b4bec1) fix: Properly reset suspended and skipped nodes when retrying (#9422) + * [de6b5ae6f](https://github.com/argoproj/argo-workflows/commit/de6b5ae6fa39693b7cd7777b9fcff9ff291476dd) fix(executor): Resource template gets incorrect plural for certain types (#9396) + * [3ddbb5e00](https://github.com/argoproj/argo-workflows/commit/3ddbb5e009f39fdb31cdaa7d77fca71dc3ae3f0e) fix: Only validate manifests for certain resource actions. Fixes #9418 (#9419) + * [a91e0041c](https://github.com/argoproj/argo-workflows/commit/a91e0041c9583deb48751c666dbbef111f3a56f9) fix: Workflow level http template hook status update. 
Fixes #8529 (#8586) + * [343c29819](https://github.com/argoproj/argo-workflows/commit/343c29819ac92d35f5db8a0de432f63df148ea31) fix: Argo waiter: invalid memory address or nil pointer dereference (#9408) + * [6f19e50a4](https://github.com/argoproj/argo-workflows/commit/6f19e50a41a17dbf06e6281f005ade6a2f19dba4) fix: Invalid memory address or nil pointer dereference (#9409) + * [7d9319b60](https://github.com/argoproj/argo-workflows/commit/7d9319b60d0bc417b25d35968c1619e51c13b7ec) Fix: UI to reflect Template.ArchiveLocation when showing Artifact's bucket in URN (#9351) + * [b7904c41c](https://github.com/argoproj/argo-workflows/commit/b7904c41c008176f40bb69c312b38ce6c0f9ce03) chore(deps-dev): bump sass from 1.54.4 to 1.54.5 in /ui (#9402) + * [fa66ed8e8](https://github.com/argoproj/argo-workflows/commit/fa66ed8e8bc20c4d759eb923b99dd6641ceafa86) chore(deps): bump github.com/tidwall/gjson from 1.14.2 to 1.14.3 (#9401) + +### Contributors + + * Brian Tate + * Julie Vogelman + * Rohan Kumar + * Saravanan Balasubramanian + * William Reed + * Yuan Tang + * dependabot[bot] + * jsvk + +## v3.4.0-rc2 (2022-08-18) + + * [6e8d1629d](https://github.com/argoproj/argo-workflows/commit/6e8d1629d9eebf78dce07f180ee99a233e422a80) fix: Artifact panel crashes when viewing artifacts. Fixes #9391 (#9392) + * [aa23a9ec8](https://github.com/argoproj/argo-workflows/commit/aa23a9ec8b9fc95593fdc41e1632412542a9c050) fix: Exit handle and Lifecycle hook to access {steps/tasks status} (#9229) + * [74cdf5d87](https://github.com/argoproj/argo-workflows/commit/74cdf5d870cc4d0b5576e6d78da7a6fde6a1be99) fix: improper selfLinks for cluster-scoped resources. 
Fixes #9320 (#9375) + * [f53d4834a](https://github.com/argoproj/argo-workflows/commit/f53d4834a208f39797637d7fad744caf0540cff8) fix: Panic on nill pointer when running a workflow with restricted parallelism (#9385) + * [c756291f7](https://github.com/argoproj/argo-workflows/commit/c756291f701296b36411ccdd639a965a302a5af8) fix: removed error check which prevented deleting successful artGC wfs. (#9383) + * [81e3d23e7](https://github.com/argoproj/argo-workflows/commit/81e3d23e730d80f24c90feb283fa3ff3b358e215) chore(deps): bump google.golang.org/api from 0.91.0 to 0.93.0 (#9381) + * [62b0db982](https://github.com/argoproj/argo-workflows/commit/62b0db9822ef93732544667739b33c1d9792ccf9) fix(ui): Correctly show icons in DAG. Fixes #9372 & #9373 (#9378) + * [47f59c050](https://github.com/argoproj/argo-workflows/commit/47f59c050ed579cdf9e01eddf0f388ac52fe5713) chore(deps): bump cloud.google.com/go/storage from 1.24.0 to 1.25.0 (#9357) + * [65670a402](https://github.com/argoproj/argo-workflows/commit/65670a402b1e9a96d246fd2ee363dd27a7f3149b) fix: Fix blank workflow details page after workflow submission (#9377) + * [6d08098a8](https://github.com/argoproj/argo-workflows/commit/6d08098a887c701cfffb2ea57f0391d6f7f5d489) feat: add argo delete --force. Fixes #9315. (#9321) + * [12466b7c9](https://github.com/argoproj/argo-workflows/commit/12466b7c9138052150afa6e0e81964d91a0538f5) fix: Retry for http timeout error. 
Fixes #9271 (#9335) + * [1b252fd33](https://github.com/argoproj/argo-workflows/commit/1b252fd33c8e456af0f6ed437b4f74a6d8cb46e7) chore(deps-dev): bump sass from 1.54.3 to 1.54.4 in /ui (#9359) + * [3f56a74dd](https://github.com/argoproj/argo-workflows/commit/3f56a74dd44e6e28da5bf2fc28cf03bae9b9f5c1) chore(deps-dev): bump webpack-dev-server from 4.9.3 to 4.10.0 in /ui (#9358) + * [fd08b0339](https://github.com/argoproj/argo-workflows/commit/fd08b0339506f8f11288393061cf8c2eb155403a) fix: ArtifactGC e2e test was looking for the wrong artifact names (#9353) + * [b430180d2](https://github.com/argoproj/argo-workflows/commit/b430180d275adac05d64b82613134b926d4405f1) fix: Deleted pods are not tracked correctly when retrying workflow (#9340) + * [e12c697b7](https://github.com/argoproj/argo-workflows/commit/e12c697b7be2547cdffd18c73bf39e10dfa458f0) feat: fix bugs in retryWorkflow if failed pod node has children nodes. Fix #9244 (#9285) + * [61f252f1d](https://github.com/argoproj/argo-workflows/commit/61f252f1d2083e5e9f262d0acd72058571e27708) fix: TestWorkflowStepRetry's comment accurately reflects what it does. (#9234) + +### Contributors + + * Alex Collins + * Dillen Padhiar + * Julie Vogelman + * Kyle Wong + * Robert Kotcher + * Saravanan Balasubramanian + * Yuan Tang + * dependabot[bot] + * jingkai + * smile-luobin + +## v3.4.0-rc1 (2022-08-09) + + * [f481e3b74](https://github.com/argoproj/argo-workflows/commit/f481e3b7444eb9cbb5c4402a27ef209818b1d817) feat: fix workflow hangs during executeDAGTask. 
Fixes #6557 (#8992) + * [ec213c070](https://github.com/argoproj/argo-workflows/commit/ec213c070d92f4ac937f55315feab0fcc108fed5) Fixes #8622: fix http1 keep alive connection leak (#9298) + * [0d77f5554](https://github.com/argoproj/argo-workflows/commit/0d77f5554f251771a175a95fc80eeb12489e42b4) fix: Look in correct bucket when downloading artifacts (Template.ArchiveLocation configured) (#9301) + * [b356cb503](https://github.com/argoproj/argo-workflows/commit/b356cb503863da43c0cc5e1fe667ebf602cb5354) feat: Artifact GC (#9255) + * [e246abec1](https://github.com/argoproj/argo-workflows/commit/e246abec1cbe6be8cb8955f798602faf619a943f) feat: modify "argoexec artifact delete" to handle multiple artifacts. Fixes #9143 (#9291) + * [f359625f6](https://github.com/argoproj/argo-workflows/commit/f359625f6262b6fa93b558f4e488a13652e9f50a) chore(deps-dev): bump @babel/preset-env from 7.18.9 to 7.18.10 in /ui (#9311) + * [ffefe9402](https://github.com/argoproj/argo-workflows/commit/ffefe9402885a275e7a26c12b5a5e52e7522c4d7) chore(deps): bump github.com/minio/minio-go/v7 from 7.0.32 to 7.0.34 (#9304) + * [ee8404bac](https://github.com/argoproj/argo-workflows/commit/ee8404baca5303a6a66f0236aa82464572bded0c) chore(deps-dev): bump @babel/core from 7.18.9 to 7.18.10 in /ui (#9310) + * [028851d7f](https://github.com/argoproj/argo-workflows/commit/028851d7f832be5687048fbec20d4d47ef910d26) chore(deps-dev): bump sass from 1.54.0 to 1.54.3 in /ui (#9309) + * [c0d26d61c](https://github.com/argoproj/argo-workflows/commit/c0d26d61c02f7fb4140a089139f8984df91eaaf9) chore(deps): bump cron-parser from 4.5.0 to 4.6.0 in /ui (#9307) + * [8d06a83bc](https://github.com/argoproj/argo-workflows/commit/8d06a83bccba87886163143e959369f0d0240943) chore(deps): bump github.com/prometheus/client_golang from 1.12.2 to 1.13.0 (#9306) + * [f83346959](https://github.com/argoproj/argo-workflows/commit/f83346959cf5204fe80b6b70e4d823bf481579fe) 
chore(deps): bump google.golang.org/api from 0.90.0 to 0.91.0 (#9305) + * [63876713e](https://github.com/argoproj/argo-workflows/commit/63876713e809ceca8e1e540a38b5ad0e650cbb2a) chore(deps): bump github.com/tidwall/gjson from 1.14.1 to 1.14.2 (#9303) + * [06b0a8cce](https://github.com/argoproj/argo-workflows/commit/06b0a8cce637db1adae0bae91670e002cfd0ae4d) fix(gcs): Wrap errors using `%w` to make retrying work (#9280) + * [083f3a21a](https://github.com/argoproj/argo-workflows/commit/083f3a21a601e086ca48d2532463a858cc8b316b) fix: pass correct error obj for azure blob failures (#9276) + * [55d15aeb0](https://github.com/argoproj/argo-workflows/commit/55d15aeb03847771e2b48f11fa84f88ad1df3e7c) feat: support zip for output artifacts archive. Fixes #8861 (#8973) + * [a51e833d9](https://github.com/argoproj/argo-workflows/commit/a51e833d9eea18ce5ef7606e55ddd025efa85de1) chore(deps): bump google.golang.org/api from 0.89.0 to 0.90.0 (#9260) + * [c484c57f1](https://github.com/argoproj/argo-workflows/commit/c484c57f13f6316bbf5ac7e98c1216ba915923c7) chore(deps-dev): bump @fortawesome/fontawesome-free from 6.1.1 to 6.1.2 in /ui (#9261) + * [2d1758fe9](https://github.com/argoproj/argo-workflows/commit/2d1758fe90fd60b37d0dfccb55c3f79d8a897289) fix: retryStrategy.Limit is now read properly for backoff strategy. Fixes #9170. 
(#9213) + * [b565bf358](https://github.com/argoproj/argo-workflows/commit/b565bf35897f529bbb446058c24b72d506024e29) Fix: user namespace override (Fixes #9266) (#9267) + * [0c24ca1ba](https://github.com/argoproj/argo-workflows/commit/0c24ca1ba8a5c38c846d595770e16398f6bd84a5) fix: TestParallel 503 with external url (#9265) + * [fd6c7a7ec](https://github.com/argoproj/argo-workflows/commit/fd6c7a7ec1f2053f9fdd03451d7d29b1339c0408) feat: Add custom event aggregator function with annotations (#9247) + * [be6ba4f77](https://github.com/argoproj/argo-workflows/commit/be6ba4f772f65588af7c79cc9351ff6dea63ed16) fix: add ServiceUnavailable to s3 transient errors list Fixes #9248 (#9249) + * [51538235c](https://github.com/argoproj/argo-workflows/commit/51538235c7a70b89855dd3b96d97387472bdbade) chore(deps): bump github.com/minio/minio-go/v7 from 7.0.31 to 7.0.32 (#9253) + * [5cf5150ef](https://github.com/argoproj/argo-workflows/commit/5cf5150efe1694bb165e98c1d7509f9987d4f524) chore(deps): bump cloud.google.com/go/storage from 1.22.1 to 1.24.0 (#9252) + * [454f19ac8](https://github.com/argoproj/argo-workflows/commit/454f19ac8959f3e0db87bb34ec8f7099558aa737) chore(deps): bump google.golang.org/api from 0.87.0 to 0.89.0 (#9251) + * [e19d73f64](https://github.com/argoproj/argo-workflows/commit/e19d73f64af073bdd7778674c72a1d197c0836f6) chore(deps-dev): bump @babel/core from 7.18.6 to 7.18.9 in /ui (#9218) + * [073431310](https://github.com/argoproj/argo-workflows/commit/07343131080ab125da7ed7d33dbf2d7e0e21362a) chore(deps-dev): bump sass from 1.53.0 to 1.54.0 in /ui (#9219) + * [aa6aaf753](https://github.com/argoproj/argo-workflows/commit/aa6aaf7539ed86f08c43d4a59eb42337aea86ce6) chore(deps-dev): bump @babel/preset-env from 7.18.6 to 7.18.9 in /ui (#9216) + * [6f8592228](https://github.com/argoproj/argo-workflows/commit/6f8592228668457a8b1db072cc53db2c5b01de55) chore(deps): bump github.com/sirupsen/logrus 
from 1.8.1 to 1.9.0 (#9214) + * [769896eb5](https://github.com/argoproj/argo-workflows/commit/769896eb5bf0a7d8db1a94b423e5bc16cf09d5aa) feat: APIratelimit headers and doc (#9206) + * [bcb596270](https://github.com/argoproj/argo-workflows/commit/bcb59627072c3b4f0cd1cef12f499ec3d8e87815) ui: remove workflowlist searchbox (#9208) + * [15fdf4903](https://github.com/argoproj/argo-workflows/commit/15fdf4903a05c7854656f59f61a676362fe551c6) fix: line return in devcontainer host file (#9204) + * [44731d671](https://github.com/argoproj/argo-workflows/commit/44731d671d425b0709bab5c5e27ed7c42a0ee92d) feat: adding new CRD type "ArtifactGCTask" (#9184) + * [d5d4628a3](https://github.com/argoproj/argo-workflows/commit/d5d4628a3573a0e1a75c367243e259844320e021) fix: Set namespace to user namespace obtained from /userinfo service (#9191) + * [e4489f5d1](https://github.com/argoproj/argo-workflows/commit/e4489f5d12c4f62421c87c69d8b997aad71fdea6) feat: log format option for wait and init containers. 
Fixes #8986 (#9169) + * [573fe98ff](https://github.com/argoproj/argo-workflows/commit/573fe98ffaa119b607bb5d4aafc1fb3c70a4c564) fix: remove unused argument which is triggering in lint (needed for PRs to pass CI) (#9186) + * [1af892133](https://github.com/argoproj/argo-workflows/commit/1af892133cd5b9e6ac22fc61bd4eabd84c568e89) feat: api ratelimiter for argoserver (#8993) + * [0f1d1d9b7](https://github.com/argoproj/argo-workflows/commit/0f1d1d9b7ef9b602b82123a9d92c212b50ac01e1) fix: support RemainingItemCount in archivedWrokflow (#9118) + * [aea581e02](https://github.com/argoproj/argo-workflows/commit/aea581e027fcd0675e785f413e964c588af304ad) fix: Incorrect link to workflows list with the same author (#9173) + * [fd6f3c263](https://github.com/argoproj/argo-workflows/commit/fd6f3c263412a1174de723470a14721b220c4651) feat: Add support for Azure Blob Storage artifacts Fixes #1540 (#9026) + * [26ff2e8a1](https://github.com/argoproj/argo-workflows/commit/26ff2e8a17ff68628090e18a3f246ab87fe950a3) chore(deps): bump google.golang.org/api from 0.86.0 to 0.87.0 (#9157) + * [877f36f37](https://github.com/argoproj/argo-workflows/commit/877f36f370d7ef00a1b8f136bb157e64c1e2769a) fix: Workflow details accessing undefined templateRef. Fixes #9167 (#9168) + * [6c20202ca](https://github.com/argoproj/argo-workflows/commit/6c20202cae8e62bb6c04a067a269e964d181e864) feat: make node info side panel resizable. 
Fixes #8917 (#8963) + * [19db1d35e](https://github.com/argoproj/argo-workflows/commit/19db1d35e3f1be55ca8e7ddc5040b9eaf4ac3f4b) chore(deps-dev): bump babel-jest from 28.1.2 to 28.1.3 in /ui (#9159) + * [96b98dafb](https://github.com/argoproj/argo-workflows/commit/96b98dafbdde5770d4d92c469e13ca81734a753f) chore(deps): bump github.com/prometheus/common from 0.35.0 to 0.37.0 (#9158) + * [4dc0e83ea](https://github.com/argoproj/argo-workflows/commit/4dc0e83ea091990e2a02dd8a2b542035ebe98d9a) chore(deps-dev): bump webpack-dev-server from 4.9.2 to 4.9.3 in /ui (#9105) + * [cbe17105d](https://github.com/argoproj/argo-workflows/commit/cbe17105d91517f37cafafb49ad5f422b895c239) chore(deps): bump github.com/minio/minio-go/v7 from 7.0.30 to 7.0.31 (#9130) + * [a9c36e723](https://github.com/argoproj/argo-workflows/commit/a9c36e723c0ab44baf3ea0cdf4706fc4b8bf848a) chore(deps-dev): bump @types/swagger-ui-react from 3.23.2 to 4.11.0 in /ui (#9132) + * [9bbf7e0f0](https://github.com/argoproj/argo-workflows/commit/9bbf7e0f092f0d76c7419d291d3f9dba016b2f3c) feat: Support overriding parameters when retry/resubmit workflows (#9141) + * [42729ff75](https://github.com/argoproj/argo-workflows/commit/42729ff7542760bd27b08a7347a603d8f232466e) fix: Workflow retry should also reset the selected nodes (#9156) + * [559b59c0a](https://github.com/argoproj/argo-workflows/commit/559b59c0a2b9b3254740edf634de8a1c63c84ab0) feat: report Artifact GC failures in user interface. Fixes #8518 (#9115) + * [56d0c664a](https://github.com/argoproj/argo-workflows/commit/56d0c664ad96c95ca6c2311b2d1559dd423a5e4d) fix: Do not error when getting log artifacts from GCS. Fixes #8746 (#9155) + * [2b92b1aef](https://github.com/argoproj/argo-workflows/commit/2b92b1aefbf1e6a12476b946f05559c9b05fffef) fix: Fixed swagger error. 
Fixes #8922 (#9078) + * [57bac335a](https://github.com/argoproj/argo-workflows/commit/57bac335afac2c28a4eb5ccf1fa97bb5bba63e97) feat: refactoring e2e test timeouts to support multiple environments. (#8925) + * [921ae1ebf](https://github.com/argoproj/argo-workflows/commit/921ae1ebf5f849d4f684c79dee375205f05cfca9) chore(deps): bump moment from 2.29.3 to 2.29.4 in /ui (#9131) + * [c149dc53c](https://github.com/argoproj/argo-workflows/commit/c149dc53c78571778b0589d977dd0445e75d9eec) chore(deps): bump github.com/stretchr/testify from 1.7.5 to 1.8.0 (#9097) + * [a0c9e66c1](https://github.com/argoproj/argo-workflows/commit/a0c9e66c1d1cb3d83c5150814c4b8ccd9acdcfb1) chore(deps): bump react-monaco-editor from 0.48.0 to 0.49.0 in /ui (#9104) + * [0f0e25e03](https://github.com/argoproj/argo-workflows/commit/0f0e25e03ffe00f79e74087044ecd080f2d6242a) [Snyk] Upgrade swagger-ui-react from 4.10.3 to 4.12.0 (#9072) + * [8fc78ca9d](https://github.com/argoproj/argo-workflows/commit/8fc78ca9dce321f2173fba7735e4b4bd48df1b6c) chore(deps): bump cronstrue from 1.125.0 to 2.11.0 in /ui (#9102) + * [01e9ef78f](https://github.com/argoproj/argo-workflows/commit/01e9ef78f9cd81d3e0ea4c85e33abd181118868c) chore(deps-dev): bump @babel/core from 7.18.5 to 7.18.6 in /ui (#9100) + * [50a4d0044](https://github.com/argoproj/argo-workflows/commit/50a4d00443cfc53976db6227394784bbf34fe239) feat: Support retry on nested DAG and node groups (#9028) + * [20f8582a9](https://github.com/argoproj/argo-workflows/commit/20f8582a9e71effee220b160b229b5fd68bf7c95) feat(ui): Add workflow author information to workflow summary and drawer (#9119) + * [18be9593e](https://github.com/argoproj/argo-workflows/commit/18be9593e76bdeb456b5de5ea047a6aa8d201d74) chore(deps-dev): bump babel-jest from 28.1.1 to 28.1.2 in /ui (#9103) + * [154d849b3](https://github.com/argoproj/argo-workflows/commit/154d849b32082a4211487b6dbebbae215b97b9ee) chore(deps): bump 
cron-parser from 4.4.0 to 4.5.0 in /ui (#9101) + * [801216c44](https://github.com/argoproj/argo-workflows/commit/801216c44053343020f41a9953a5ed1722b36232) chore(deps-dev): bump @babel/preset-env from 7.18.2 to 7.18.6 in /ui (#9099) + * [ba225d3aa](https://github.com/argoproj/argo-workflows/commit/ba225d3aa586dd9e6770ec1b2f482f1c15fe2add) chore(deps): bump google.golang.org/api from 0.85.0 to 0.86.0 (#9096) + * [ace228486](https://github.com/argoproj/argo-workflows/commit/ace2284869a9574602b602a5bdf4592cd6ae8376) chore(deps): bump github.com/minio/minio-go/v7 from 7.0.29 to 7.0.30 (#9098) + * [3967929cf](https://github.com/argoproj/argo-workflows/commit/3967929cfde54c2a3c62c47fd509beaea1832ea4) chore(deps): bump dependabot/fetch-metadata from 1.3.1 to 1.3.3 (#9095) + * [f69cb89b1](https://github.com/argoproj/argo-workflows/commit/f69cb89b16bce0b88b63ec3fec14d7abc0b32fef) docs(workflow/artifacts/gcs): correct spelling of BUCKET (#9082) + * [61211f9db](https://github.com/argoproj/argo-workflows/commit/61211f9db1568190dd46b7469fa79eb6530bba73) fix: Add workflow failures before hooks run. Fixes #8882 (#9009) + * [c1154ff97](https://github.com/argoproj/argo-workflows/commit/c1154ff975bcb580554f78f393fd908b1f64ea6a) feat: redirect to archive on workflow absence. Fixes #7745 (#7854) + * [f5f1a3438](https://github.com/argoproj/argo-workflows/commit/f5f1a34384ab4bbbebd9863711a3047a08ced7fb) fix: sync lock should be released only if we're retrying (#9063) + * [146e38a3f](https://github.com/argoproj/argo-workflows/commit/146e38a3f91ac8a7b9b749d96c54bd3eab2ce1ab) chore!: Remove dataflow pipelines from codebase (#9071) + * [92eaadffc](https://github.com/argoproj/argo-workflows/commit/92eaadffcd0c244f05b23d4f177fd53f000b1a99) feat: inform users on UI if an artifact will be deleted. 
Fixes #8667 (#9056) + * [d0cfc6d10](https://github.com/argoproj/argo-workflows/commit/d0cfc6d10b11d9977007bb14373e699e604c1b74) feat: UI default to the namespace associated with ServiceAccount. Fixes #8533 (#9008) + * [1ccc120cd](https://github.com/argoproj/argo-workflows/commit/1ccc120cd5392f877ecbb328cbf5304e6eb89783) feat: added support for binary HTTP template bodies. Fixes #6888 (#8087) + * [443155dea](https://github.com/argoproj/argo-workflows/commit/443155deaa1aa9e19688de0580840bd0f8598dd5) feat: If artifact has been deleted, show a message to that effect in the iFrame in the UI (#8966) + * [cead295fe](https://github.com/argoproj/argo-workflows/commit/cead295fe8b4cdfbc7eeb3c2dcfa99e2bfb291b6) chore(deps-dev): bump @types/superagent from 3.8.3 to 4.1.15 in /ui (#9057) + * [b1e49a471](https://github.com/argoproj/argo-workflows/commit/b1e49a471c7de65a628ac496a4041a2ec9975eb0) chore(deps-dev): bump html-webpack-plugin from 3.2.0 to 4.5.2 in /ui (#9036) + * [11801d044](https://github.com/argoproj/argo-workflows/commit/11801d044cfddfc8100d973e91ddfe9a1252a028) chore(deps): bump superagent from 7.1.6 to 8.0.0 in /ui (#9052) + * [c30493d72](https://github.com/argoproj/argo-workflows/commit/c30493d722c2fd9aa5ccc528327759d96f99fb23) chore(deps): bump github.com/prometheus/common from 0.34.0 to 0.35.0 (#9049) + * [74c1e86b8](https://github.com/argoproj/argo-workflows/commit/74c1e86b8bc302780f36a364d7adb98184bf6e45) chore(deps): bump google.golang.org/api from 0.83.0 to 0.85.0 (#9044) + * [77be291da](https://github.com/argoproj/argo-workflows/commit/77be291da21c5057d0c966adce449a7f9177e0db) chore(deps): bump github.com/stretchr/testify from 1.7.2 to 1.7.5 (#9045) + * [278f61c46](https://github.com/argoproj/argo-workflows/commit/278f61c46309b9df07ad23497a4fd97817af93cc) chore(deps): bump github.com/spf13/cobra from 1.4.0 to 1.5.0 (#9047) + * 
[e288dfc89](https://github.com/argoproj/argo-workflows/commit/e288dfc8963fdd5e5bff8d7cbed5d227e76afd7b) Revert "chore(deps-dev): bump raw-loader from 0.5.1 to 4.0.2 in /ui (#9034)" (#9041) + * [b9318ba93](https://github.com/argoproj/argo-workflows/commit/b9318ba939defe5fdeb46dcbfc44bc8f7cf14a6d) chore(deps-dev): bump webpack-cli from 4.9.2 to 4.10.0 in /ui (#9037) + * [891a256a2](https://github.com/argoproj/argo-workflows/commit/891a256a2165a853bc18e5f068d870a232b671f3) chore(deps-dev): bump sass from 1.52.1 to 1.53.0 in /ui (#9038) + * [db73db04d](https://github.com/argoproj/argo-workflows/commit/db73db04d033cc5a4e2f113fd090afe773ebcb81) chore(deps-dev): bump @babel/core from 7.18.2 to 7.18.5 in /ui (#9031) + * [fa93a6558](https://github.com/argoproj/argo-workflows/commit/fa93a655834138fc549f67f8a4eadd8df7a18c50) chore(deps-dev): bump babel-jest from 28.1.0 to 28.1.1 in /ui (#9035) + * [aeed837be](https://github.com/argoproj/argo-workflows/commit/aeed837be8083b8f49242635f3baa1b162a8db8b) chore(deps-dev): bump webpack-dev-server from 4.9.0 to 4.9.2 in /ui (#9032) + * [e7d3308ef](https://github.com/argoproj/argo-workflows/commit/e7d3308ef4f755d484c8ca6cf90993a5e1d7f954) chore(deps-dev): bump raw-loader from 0.5.1 to 4.0.2 in /ui (#9034) + * [d90f11c3e](https://github.com/argoproj/argo-workflows/commit/d90f11c3e4c1f7d88be3220f57c3184d7beaddaf) [Snyk] Upgrade superagent from 7.1.3 to 7.1.4 (#9020) + * [6e962fdca](https://github.com/argoproj/argo-workflows/commit/6e962fdcab5effbb4ac12180249019d7d6241b8c) feat: sanitize config links (#8779) + * [89f3433bf](https://github.com/argoproj/argo-workflows/commit/89f3433bf7cbca7092952aa8ffc5e5c254f28999) fix: workflow.status is now set properly in metrics. 
Fixes #8895 (#8939) + * [2aa32aea5](https://github.com/argoproj/argo-workflows/commit/2aa32aea5eaf325bc6a3eff852f2ff0052366bf6) fix: check for nil, and add logging to expose root cause of panic in Issue 8968 (#9010) + * [62287487a](https://github.com/argoproj/argo-workflows/commit/62287487a0895a457804f0ac97fdf9c9413dd2ab) fix: Treat 'connection reset by peer' as a transient network error. Fixes #9013 (#9017) + * [2e3177617](https://github.com/argoproj/argo-workflows/commit/2e31776175b2cbb123278920e30807244e2f7a3b) fix: add nil check for retryStrategy.Limit in deadline check. Fixes #8990 (#8991) + * [73487fbee](https://github.com/argoproj/argo-workflows/commit/73487fbeeb645ac8f6229f98aed2ec6eec756571) chore(deps): bump github.com/minio/minio-go/v7 from 7.0.27 to 7.0.29 (#9004) + * [e34e378af](https://github.com/argoproj/argo-workflows/commit/e34e378af05b0ffde14b89e8d9eec9964a903002) chore(deps): bump github.com/argoproj/pkg from 0.13.2 to 0.13.3 (#9002) + * [89f82cea4](https://github.com/argoproj/argo-workflows/commit/89f82cea4b3f3f40d1666d2469ab3a97e3665fdd) feat: log workflow size before hydrating/dehydrating. Fixes #8976 (#8988) + * [a1535fa44](https://github.com/argoproj/argo-workflows/commit/a1535fa446d15bae56656d20577fdbb000353cc2) fix: Workflow Duration metric shouldn't increase after workflow complete (#8989) + * [6106ac722](https://github.com/argoproj/argo-workflows/commit/6106ac7229eeaac9132f8df595b569de2bc68ccf) feat: Support loading manifest from artifacts for resource templates. 
Fixes #5878 (#8657) + * [e0a1afa91](https://github.com/argoproj/argo-workflows/commit/e0a1afa91d8e51ba2c6aed6c604f2a69bdb1b387) fix: sync cluster Workflow Template Informer before it's used (#8961) + * [1ed1ee114](https://github.com/argoproj/argo-workflows/commit/1ed1ee114b2069d9cdeb9fd1f3a7513f9f13a396) chore(deps): bump actions/setup-python from 3 to 4 (#8949) + * [6c244f3cb](https://github.com/argoproj/argo-workflows/commit/6c244f3cb400f69b641d7e59c5215806a2870604) fix: long code blocks overflow in ui. Fixes #8916 (#8947) + * [e31ffcd33](https://github.com/argoproj/argo-workflows/commit/e31ffcd339370d6000f86d552845d7d378620d29) fix: Correct kill command. Fixes #8687 (#8908) + * [263977967](https://github.com/argoproj/argo-workflows/commit/263977967a47f24711b9f6110fe950c47d8c5f08) chore(deps): bump google.golang.org/api from 0.82.0 to 0.83.0 (#8951) + * [e96b1b3fd](https://github.com/argoproj/argo-workflows/commit/e96b1b3fd4e27608de8a94763782bd2d41cd5761) chore(deps): bump github.com/stretchr/testify from 1.7.1 to 1.7.2 (#8950) + * [107ed932d](https://github.com/argoproj/argo-workflows/commit/107ed932de466a89feb71dc04950c86d98747cc5) feat: add indexes for improve archived workflow performance. Fixes #8836 (#8860) + * [1d4edb433](https://github.com/argoproj/argo-workflows/commit/1d4edb4333ce4e5efeb44a199b390c3d9d02fc25) feat: Date range filter for workflow list. Fixes #8329 (#8596) + * [a6eef41bf](https://github.com/argoproj/argo-workflows/commit/a6eef41bf961cda347b9a9bd8476fc33e3a467a9) feat: add artifact delete to argoexec CLI. Fixes #8669 (#8913) + * [416fce705](https://github.com/argoproj/argo-workflows/commit/416fce70543059cc81753ba5131b1661a13a0fed) fix: Fork sub-process. Fixes #8454 (#8906) + * [750c4e1f6](https://github.com/argoproj/argo-workflows/commit/750c4e1f699b770a309843f2189b4e703305e44f) fix: Only signal running containers, ignore failures. 
(#8909) + * [ede1a39e7](https://github.com/argoproj/argo-workflows/commit/ede1a39e7cb48890aa5d4c8221e2c9d94e7ef007) fix: workflowMetadata needs to be loaded into globalParams in both ArgoServer and Controller (#8907) + * [df3764925](https://github.com/argoproj/argo-workflows/commit/df37649251f5791c40802defd923dd735924eb3a) Add left-margin to the question circle next to parameter name in Submit Workflow Panel (#8927) + * [1e17f7ff5](https://github.com/argoproj/argo-workflows/commit/1e17f7ff5232067c9c1c05bfa55322e41e0915d7) chore(deps): bump google.golang.org/api from 0.81.0 to 0.82.0 (#8914) + * [7dacb5bca](https://github.com/argoproj/argo-workflows/commit/7dacb5bcaeae8e3be64bb1fbf54024401d42d867) fix: Fixed Swagger error. Fixes #8830 (#8886) + * [8592e9ce6](https://github.com/argoproj/argo-workflows/commit/8592e9ce6e4de64e55c23bfda460b0cad67e74f7) feat: enable gcflags (compiler flags) to be passed into 'go build' (#8896) + * [7a626aa6a](https://github.com/argoproj/argo-workflows/commit/7a626aa6a1368da59c322f1d768e691b0ee4d7e4) feat: add Artifact.Deleted (#8893) + * [f2c748ac4](https://github.com/argoproj/argo-workflows/commit/f2c748ac44ed41b1d672e6c45a34090992b979d7) feat: Artifact GC Finalizer needs to be added if any Output Artifacts have a strategy (#8856) + * [093a6fe7e](https://github.com/argoproj/argo-workflows/commit/093a6fe7e1b1926f5feaff07a66edb9ff036f866) Add Orchest to ecosystem (#8884) + * [2b5ae622b](https://github.com/argoproj/argo-workflows/commit/2b5ae622bc257a4dafb4fab961e8142accaa484d) Removed Security Nudge and all its invocations (#8838) + * [86ab55726](https://github.com/argoproj/argo-workflows/commit/86ab55726e213bc406e69edb14921b501938fa25) chore(deps-dev): bump monaco-editor-webpack-plugin from 1.9.0 to 1.9.1 in /ui (#8877) + * [df750d715](https://github.com/argoproj/argo-workflows/commit/df750d7158f7291983aeffe709b7624eb73f964a) chore(deps-dev): bump 
@babel/preset-env from 7.18.0 to 7.18.2 in /ui (#8876) + * [f0447918d](https://github.com/argoproj/argo-workflows/commit/f0447918d6826b21a8e0cf0d0d218113e69059a8) chore(deps): bump github.com/spf13/viper from 1.11.0 to 1.12.0 (#8874) + * [8b7bdb713](https://github.com/argoproj/argo-workflows/commit/8b7bdb7139e8aa152e95ad3fe6815e7a801afcbb) chore(deps): bump github.com/minio/minio-go/v7 from 7.0.26 to 7.0.27 (#8875) + * [282a72295](https://github.com/argoproj/argo-workflows/commit/282a722950b113008b4efb258309cc4066f925a0) add pismo.io to argo users (#8871) + * [1a517e6f5](https://github.com/argoproj/argo-workflows/commit/1a517e6f5b801feae9416acf824c83ff65dea65c) chore(deps): bump superagent from 3.8.3 to 7.1.3 in /ui (#8851) + * [53012fe66](https://github.com/argoproj/argo-workflows/commit/53012fe66fb6afcefcf4b237c34264a600ae6804) chore(deps-dev): bump source-map-loader from 0.2.4 to 1.1.3 in /ui (#8850) + * [35eb2bb96](https://github.com/argoproj/argo-workflows/commit/35eb2bb96d1489366e9813c14863a79db4ea85df) chore(deps-dev): bump file-loader from 6.0.0 to 6.2.0 in /ui (#8848) + * [116dfdb03](https://github.com/argoproj/argo-workflows/commit/116dfdb039611d70dd98aef7eb4428b589d55361) chore(deps-dev): bump @fortawesome/fontawesome-free from 5.15.3 to 6.1.1 in /ui (#8846) + * [7af70ff39](https://github.com/argoproj/argo-workflows/commit/7af70ff3926e0400d2fe5260f0ea2eeb8bc9bf53) chore(deps-dev): bump glob from 7.1.6 to 8.0.3 in /ui (#8845) + * [67dab5d85](https://github.com/argoproj/argo-workflows/commit/67dab5d854a4b1be693571765eae3857559851c6) chore(deps): bump cron-parser from 2.18.0 to 4.4.0 in /ui (#8844) + * [e7d294214](https://github.com/argoproj/argo-workflows/commit/e7d2942148ed876717b24fcd2b8af7735e977cb0) chore(deps-dev): bump @babel/core from 7.12.10 to 7.18.2 in /ui (#8843) + * [f676ac59a](https://github.com/argoproj/argo-workflows/commit/f676ac59a0794791dc5bdfd74acd9764110f2d2a) 
chore(deps): bump google.golang.org/api from 0.80.0 to 0.81.0 (#8841) + * [d324faaf8](https://github.com/argoproj/argo-workflows/commit/d324faaf885d32e8666a70e1f20bae7e71db386e) chore(deps): bump github.com/aliyun/aliyun-oss-go-sdk from 2.2.2+incompatible to 2.2.4+incompatible (#8842) + * [40ab51766](https://github.com/argoproj/argo-workflows/commit/40ab51766aa7cb511dcc3533aeb917379e6037ad) Revert "chore(deps-dev): bump style-loader from 0.20.3 to 2.0.0 in /ui" (#8839) + * [cc9d14cf0](https://github.com/argoproj/argo-workflows/commit/cc9d14cf0d60812e177ebb447181df933199b722) feat: Use Pod Names v2 by default (#8748) + * [c0490ec04](https://github.com/argoproj/argo-workflows/commit/c0490ec04be88975c316ff6a9dc007861c8f9254) chore(deps-dev): bump webpack-cli from 3.3.11 to 4.9.2 in /ui (#8726) + * [bc4a80a8d](https://github.com/argoproj/argo-workflows/commit/bc4a80a8d63f869a7a607861374e0c206873f250) feat: remove size limit of 128kb for workflow templates. 
Fixes #8789 (#8796) + * [5c91d93af](https://github.com/argoproj/argo-workflows/commit/5c91d93afd07f207769a63730ec72e9a93b584ce) chore(deps-dev): bump @babel/preset-env from 7.12.11 to 7.18.0 in /ui (#8825) + * [d61bea949](https://github.com/argoproj/argo-workflows/commit/d61bea94947526e7ca886891152c565cc15abded) chore(deps): bump js-yaml and @types/js-yaml in /ui (#8823) + * [4688afcc5](https://github.com/argoproj/argo-workflows/commit/4688afcc51c50edc27eaba92c449bc4bce00a139) chore(deps-dev): bump webpack-dev-server from 3.11.3 to 4.9.0 in /ui (#8818) + * [14ac0392c](https://github.com/argoproj/argo-workflows/commit/14ac0392ce79bddbb9fc44c86fcf315ea1746235) chore(deps): bump cloud.google.com/go/storage from 1.22.0 to 1.22.1 (#8816) + * [3a21fb8a4](https://github.com/argoproj/argo-workflows/commit/3a21fb8a423047268a50fba22dcdd2b4d4029944) chore(deps-dev): bump tslint from 5.11.0 to 5.20.1 in /ui (#8822) + * [eca4bdc49](https://github.com/argoproj/argo-workflows/commit/eca4bdc493332eeaf626f454fb25f1ec5257864a) chore(deps-dev): bump copyfiles from 1.2.0 to 2.4.1 in /ui (#8821) + * [3416253be](https://github.com/argoproj/argo-workflows/commit/3416253be1047d5c6e6c0cb69defd92ee7eea5fe) chore(deps-dev): bump style-loader from 0.20.3 to 2.0.0 in /ui (#8820) + * [e9ea8ee69](https://github.com/argoproj/argo-workflows/commit/e9ea8ee698d8b0d173d0039eba66b2a017d650d3) chore(deps-dev): bump sass from 1.30.0 to 1.52.1 in /ui (#8817) + * [ac92a49d0](https://github.com/argoproj/argo-workflows/commit/ac92a49d0f253111bd14bd72699ca3ad8cbeee1d) chore(deps): bump google.golang.org/api from 0.79.0 to 0.80.0 (#8815) + * [1bd841853](https://github.com/argoproj/argo-workflows/commit/1bd841853633ebb71fc569b2975def90afb1a68d) docs(running-locally): update dependencies info (#8810) + * [bc0100346](https://github.com/argoproj/argo-workflows/commit/bc01003468186ddcb93d1d32e9a49a75046827e7) fix: Change to distroless. 
Fixes #8805 (#8806) + * [872826591](https://github.com/argoproj/argo-workflows/commit/8728265915fd7c18f05f32e32dc12de1ef3ca46b) Revert "chore(deps-dev): bump style-loader from 0.20.3 to 2.0.0 in /u… (#8804) + * [fbb8246cd](https://github.com/argoproj/argo-workflows/commit/fbb8246cdc44d218f70f0de677be0f4dfd0780cf) fix: set NODE_OPTIONS to no-experimental-fetch to prevent yarn start error (#8802) + * [39fbdb2a5](https://github.com/argoproj/argo-workflows/commit/39fbdb2a551482c5ae2860fd266695c0113cb7b7) fix: fix a command in the quick-start page (#8782) + * [961f731b7](https://github.com/argoproj/argo-workflows/commit/961f731b7e9cb60490dd763a394893154c0b3c60) fix: Omitted task result should also be valid (#8776) + * [67cdd5f97](https://github.com/argoproj/argo-workflows/commit/67cdd5f97a16041fd1ec32134158c71c07249e4d) chore(deps-dev): bump babel-loader from 8.2.2 to 8.2.5 in /ui (#8767) + * [fce407663](https://github.com/argoproj/argo-workflows/commit/fce40766351440375e6b2761cd6a304474764b9a) chore(deps-dev): bump babel-jest from 26.6.3 to 28.1.0 in /ui (#8774) + * [026298671](https://github.com/argoproj/argo-workflows/commit/02629867180367fb21a347c3a36cf2d52b63a2c3) chore(deps-dev): bump style-loader from 0.20.3 to 2.0.0 in /ui (#8775) + * [2e1fd11db](https://github.com/argoproj/argo-workflows/commit/2e1fd11db5bbb95ee9bcdbeaeab970fa92fc3588) chore(deps-dev): bump webpack from 4.35.0 to 4.46.0 in /ui (#8768) + * [00bda0b06](https://github.com/argoproj/argo-workflows/commit/00bda0b0690ea24fa52603f30eecb40fe8b5cdd7) chore(deps-dev): bump @types/prop-types from 15.5.4 to 15.7.5 in /ui (#8773) + * [28b494a67](https://github.com/argoproj/argo-workflows/commit/28b494a674e560a07e5a1c98576a94bbef111fc5) chore(deps-dev): bump @types/dagre from 0.7.44 to 0.7.47 in /ui (#8772) + * [b07a57694](https://github.com/argoproj/argo-workflows/commit/b07a576945e87915e529d718101319d2f83cd98a) chore(deps): bump 
react-monaco-editor from 0.47.0 to 0.48.0 in /ui (#8770) + * [2a0ac29d2](https://github.com/argoproj/argo-workflows/commit/2a0ac29d27466a247c3a4fee0429d95aa5b67338) chore(deps-dev): bump webpack-dev-server from 3.7.2 to 3.11.3 in /ui (#8769) + * [6b11707f5](https://github.com/argoproj/argo-workflows/commit/6b11707f50301a125eb8349193dd0be8659a4cdf) chore(deps): bump github.com/coreos/go-oidc/v3 from 3.1.0 to 3.2.0 (#8765) + * [d23693166](https://github.com/argoproj/argo-workflows/commit/d236931667a60266f87fbc446064ceebaf582996) chore(deps): bump github.com/prometheus/client_golang from 1.12.1 to 1.12.2 (#8763) + * [f6d84640f](https://github.com/argoproj/argo-workflows/commit/f6d84640fda435e08cc6a961763669b7572d0e69) fix: Skip TestExitHookWithExpression() completely (#8761) + * [178bbbc31](https://github.com/argoproj/argo-workflows/commit/178bbbc31c594f9ded4b8a66b0beecbb16cfa949) fix: Temporarily fix CI build. Fixes #8757. (#8758) + * [6b9dc2674](https://github.com/argoproj/argo-workflows/commit/6b9dc2674f2092b2198efb0979e5d7e42efffc30) feat: Add WebHDFS support for HTTP artifacts. Fixes #7540 (#8468) + * [354dee866](https://github.com/argoproj/argo-workflows/commit/354dee86616014bcb77afd170685242a18efd07c) fix: Exit lifecycle hook should respect expression. Fixes #8742 (#8744) + * [aa366db34](https://github.com/argoproj/argo-workflows/commit/aa366db345d794f0d330336d51eb2a88f14ebbe6) fix: remove list and watch on secrets. 
Fixes #8534 (#8555) + * [342abcd6d](https://github.com/argoproj/argo-workflows/commit/342abcd6d72b4cda64b01f30fa406b2f7b86ac6d) fix: mkdocs uses 4space indent for nested list (#8740) + * [567436640](https://github.com/argoproj/argo-workflows/commit/5674366404a09cee5f4e36e338a4292b057fe1b9) chore(deps-dev): bump typescript from 3.9.2 to 4.6.4 in /ui (#8719) + * [1f2417e30](https://github.com/argoproj/argo-workflows/commit/1f2417e30937399e96fd4dfcd3fcc2ed7333291a) feat: running locally through dev container (#8677) + * [515e0763a](https://github.com/argoproj/argo-workflows/commit/515e0763ad4b1bd9d2941fc5c141c52691fc3b12) fix: Simplify return logic in executeTmplLifeCycleHook (#8736) + * [b8f511309](https://github.com/argoproj/argo-workflows/commit/b8f511309adf6443445e6dbf55889538fd39eacc) fix: Template in Lifecycle hook should be optional (#8735) + * [98a97d6d9](https://github.com/argoproj/argo-workflows/commit/98a97d6d91c0d9d83430da20e11cea39a0a7919b) chore(deps-dev): bump ts-node from 4.1.0 to 9.1.1 in /ui (#8722) + * [e4d35f0ad](https://github.com/argoproj/argo-workflows/commit/e4d35f0ad3665d7d732a16b9e369f8658049bacd) chore(deps-dev): bump react-hot-loader from 3.1.3 to 4.13.0 in /ui (#8723) + * [b9ec444fc](https://github.com/argoproj/argo-workflows/commit/b9ec444fc4cf60ed876823b25a41f74a28698f0b) chore(deps-dev): bump copy-webpack-plugin from 4.5.2 to 5.1.2 in /ui (#8718) + * [43fb7106a](https://github.com/argoproj/argo-workflows/commit/43fb7106a83634b85a3b934e22a05246e76f7d15) chore(deps-dev): bump tslint-plugin-prettier from 2.1.0 to 2.3.0 in /ui (#8716) + * [c0cd1f855](https://github.com/argoproj/argo-workflows/commit/c0cd1f855a5ef89d0f7a0d49f8e11781735cfa86) feat: ui, Dependabot auto dependency update (#8706) + * [b3bf327a0](https://github.com/argoproj/argo-workflows/commit/b3bf327a021e4ab5cc329f83bdec8f533c87a4d6) fix: Fix the recursive example to call the coinflip template (#8696) 
+ * [427c16072](https://github.com/argoproj/argo-workflows/commit/427c16072b6c9d677265c95f5fd84e6a37fcc848) feat: Increased default significant figures in formatDuration. Fixes #8650 (#8686) + * [7e2df8129](https://github.com/argoproj/argo-workflows/commit/7e2df81299f660089cf676f7622638156affedf5) chore(deps): bump google.golang.org/api from 0.78.0 to 0.79.0 (#8710) + * [9ddae875f](https://github.com/argoproj/argo-workflows/commit/9ddae875fdb49d3e852f935e3d8b52fae585bc5e) fix: Fixed podName in killing daemon pods. Fixes #8692 (#8708) + * [72d3f32e5](https://github.com/argoproj/argo-workflows/commit/72d3f32e5676207d1511c609b00d26df20a2607e) fix: update go-color path/version (#8707) + * [92b3ef27a](https://github.com/argoproj/argo-workflows/commit/92b3ef27af7a7e6b930045e95072a47c8745b1d3) fix: upgrade moment from 2.29.2 to 2.29.3 (#8679) + * [8d4ac38a1](https://github.com/argoproj/argo-workflows/commit/8d4ac38a158dc2b4708478f7e7db1f2dd488ffed) feat: ui, add node version constraint (#8678) + * [2cabddc9a](https://github.com/argoproj/argo-workflows/commit/2cabddc9a9241061d8b89cf671f1c548405f4cb0) chore(deps): bump github.com/minio/minio-go/v7 from 7.0.24 to 7.0.26 (#8673) + * [859ebe99f](https://github.com/argoproj/argo-workflows/commit/859ebe99f760c6fb30870993359274a92cec2fb9) fix: Terminate, rather than delete, deadlined pods. Fixes #8545 (#8620) + * [dd565208e](https://github.com/argoproj/argo-workflows/commit/dd565208e236bc56230e75bedcc5082d171e6155) fix(git): add auth to fetch (#8664) + * [70f70209d](https://github.com/argoproj/argo-workflows/commit/70f70209d693d3933177a7de2cb6e421b763656f) fix: Handle omitted nodes in DAG enhanced depends logic. Fixes #8654 (#8672) + * [3fdf30d9f](https://github.com/argoproj/argo-workflows/commit/3fdf30d9f9181d74d81ca3184b53bbe661ecb845) fix: Enhance artifact visualization. 
Fixes #8619 (#8655) + * [16fef4e54](https://github.com/argoproj/argo-workflows/commit/16fef4e5498fac88dc80d33d653c99fec641150d) fix: enable `ARGO_REMOVE_PVC_PROTECTION_FINALIZER` by default. Fixes #8592 (#8661) + * [e4d57c6d5](https://github.com/argoproj/argo-workflows/commit/e4d57c6d560e025a336415aa840d2457eeca79f4) feat: `argo cp` to download artifacts. Fixes #695 (#8582) + * [e6e0c9bb3](https://github.com/argoproj/argo-workflows/commit/e6e0c9bb3b923a6d977875cbbd2744b8bacfce15) chore(deps): bump docker/login-action from 1 to 2 (#8642) + * [05781101d](https://github.com/argoproj/argo-workflows/commit/05781101dc94701aabd1bdbc2d3be4aa383b49f2) chore(deps): bump docker/setup-buildx-action from 1 to 2 (#8641) + * [6a4957135](https://github.com/argoproj/argo-workflows/commit/6a495713593f11514500998f6f69ce8f2e463975) chore(deps): bump docker/setup-qemu-action from 1 to 2 (#8640) + * [02370b51d](https://github.com/argoproj/argo-workflows/commit/02370b51d59bdd60b07c6c938737ed997807e4f2) feat: Track UI event #8402 (#8460) + * [64a2b28a5](https://github.com/argoproj/argo-workflows/commit/64a2b28a5fb51b50fe0e0a30185a8c3400d10548) fix: close http body. Fixes #8622 (#8624) + * [68a2cee6a](https://github.com/argoproj/argo-workflows/commit/68a2cee6a3373214803db009c7a6290954107c37) chore(deps): bump google.golang.org/api from 0.77.0 to 0.78.0 (#8602) + * [ed351ff08](https://github.com/argoproj/argo-workflows/commit/ed351ff084c4524ff4b2a45b53e539f91f5d423a) fix: ArtifactGC moved from Template to Artifact. Fixes #8556. 
(#8581) + * [87470e1c2](https://github.com/argoproj/argo-workflows/commit/87470e1c2bf703a9110e97bb755614ce8757fdcc) fix: Added artifact Content-Security-Policy (#8585) + * [61b80c90f](https://github.com/argoproj/argo-workflows/commit/61b80c90fd93aebff26df73fcddffa75732d10ec) Fix panic on executor plugin eventhandler (#8588) + * [974031570](https://github.com/argoproj/argo-workflows/commit/97403157054cb779b2005991fbb65c583aa3644c) fix: Polish artifact visualisation. Fixes #7743 (#8552) + * [98dd898be](https://github.com/argoproj/argo-workflows/commit/98dd898bef67e8523a0bf2ed942241dcb69eabe7) fix: Correct CSP. Fixes #8560 (#8579) + * [3d892d9b4](https://github.com/argoproj/argo-workflows/commit/3d892d9b481c5eefeb309b462b3f166a31335bc4) feat: New endpoint capable of serving directory listing or raw file, from non-archived or archived workflow (#8548) + * [71e2073b6](https://github.com/argoproj/argo-workflows/commit/71e2073b66b3b30b1eda658e88b7f6fd89469a92) chore(deps): bump lodash-es from 4.17.20 to 4.17.21 in /ui (#8577) + * [abf3c7411](https://github.com/argoproj/argo-workflows/commit/abf3c7411921dd422804c72b4f68dc2ab2731047) chore(deps): bump github.com/argoproj/pkg from 0.13.1 to 0.13.2 (#8571) + * [ffd5544c3](https://github.com/argoproj/argo-workflows/commit/ffd5544c31da026999b78197f55e6f4d2c8d7628) chore(deps): bump google.golang.org/api from 0.76.0 to 0.77.0 (#8572) + * [dc8fef3e5](https://github.com/argoproj/argo-workflows/commit/dc8fef3e5b1c0b833cc8568dbea23dbd1b310bdc) fix: Support memoization on plugin node. Fixes #8553 (#8554) + * [5b8638fcb](https://github.com/argoproj/argo-workflows/commit/5b8638fcb0f6ab0816f58f35a71f4f178ba9b7d9) fix: modified `SearchArtifact` to return `ArtifactSearchResults`. Fixes #8543 (#8557) + * [9398b0717](https://github.com/argoproj/argo-workflows/commit/9398b0717c14e15c78f6fe314ca9168d0104418d) feat: add more options to ArtifactSearchQuery. 
Fixes #8542. (#8549) + * [c781a5828](https://github.com/argoproj/argo-workflows/commit/c781a582821c4e08416eba9a3889eb2588596aa6) feat: Make artifacts discoverable in the DAG. Fixes #8494 (#8496) + * [d25b3fec4](https://github.com/argoproj/argo-workflows/commit/d25b3fec49377ea4be6a63d815a2b609636ef607) feat: Improve artifact server response codes. Fixes #8516 (#8524) + * [65b7437f7](https://github.com/argoproj/argo-workflows/commit/65b7437f7b26e19581650c0c2078f9dd8c89a73f) chore(deps): bump github.com/argoproj/pkg from 0.13.0 to 0.13.1 (#8537) + * [ecd91b1c4](https://github.com/argoproj/argo-workflows/commit/ecd91b1c4215a2ab8742f7c43eaade98a1d47eba) fix: added json tag to ArtifactGCStrategies (#8523) + * [f223bb8a3](https://github.com/argoproj/argo-workflows/commit/f223bb8a3c277e96a19e08f30f27ad70c0c425d3) fix: ArtifactGCOnWorkflowDeletion typo quick fix (#8519) + * [b4202b338](https://github.com/argoproj/argo-workflows/commit/b4202b338b5f97552fb730e4d07743c365d6f5ec) feat: Do not return cause of internal server error. Fixes #8514 (#8522) + * [d7bcaa756](https://github.com/argoproj/argo-workflows/commit/d7bcaa7569ac15d85eb293a72a1a98779275bd6e) feat: add finalizer for artifact GC (#8513) + * [c3ae56565](https://github.com/argoproj/argo-workflows/commit/c3ae56565bbe05c9809c5ad1192fcfc3ae717114) fix: Do not log container not found (#8509) + * [9a1345323](https://github.com/argoproj/argo-workflows/commit/9a1345323bb4727ba4fa769363b671213c02ded7) feat: Implement Workflow.SearchArtifacts(). Fixes #8473 (#8517) + * [30d9f8d77](https://github.com/argoproj/argo-workflows/commit/30d9f8d77caa69467f2b388b045fe9c3f8d05cb8) feat: Add correct CSP/XFO to served artifacts. Fixing #8492 (#8511) + * [d3f8db341](https://github.com/argoproj/argo-workflows/commit/d3f8db3417586b307401ecd5d172f9a1f97241db) feat: Save `containerSet` logs in artifact repository. 
Fixes #7897 (#8491) + * [6769ba720](https://github.com/argoproj/argo-workflows/commit/6769ba7209c1c8ffa6ecd5414d9694e743afe557) feat: add ArtifactGC to template spec (#8493) + * [19e763a3b](https://github.com/argoproj/argo-workflows/commit/19e763a3ba7ceaa890dc34310abeb4e7e4555641) chore(deps): bump google.golang.org/api from 0.75.0 to 0.76.0 (#8495) + * [6e9d42aed](https://github.com/argoproj/argo-workflows/commit/6e9d42aed1623e215a04c98cf1632f08f79a45cb) feat: add capability to choose params in suspend node.Fixes #8425 (#8472) + * [8685433e1](https://github.com/argoproj/argo-workflows/commit/8685433e1c183f1eb56add14c3e19c7b676314bb) feat: Added a delete function to the artifacts storage. Fixes #8470 (#8490) + * [9f5759b5b](https://github.com/argoproj/argo-workflows/commit/9f5759b5bd2a01d0f2930faa20ad5a769395eb99) feat: Enable git artifact clone of single branch (#8465) + * [7376e7cda](https://github.com/argoproj/argo-workflows/commit/7376e7cda4f72f0736fc128d15495acff71b987d) feat: Artifact streaming: enable artifacts to be streamed to users rather than loading the full file to disk first. Fixes #8396 (#8486) + * [06e9445ba](https://github.com/argoproj/argo-workflows/commit/06e9445ba71faba6f1132703762ec592a168ca9b) feat: add empty dir into wait container (#8390) + * [c61770622](https://github.com/argoproj/argo-workflows/commit/c6177062276cc39c3b21644ab1d6989cbcaf075c) fix: Pod `OOMKilled` should fail workflow. Fixes #8456 (#8478) + * [37a8a81df](https://github.com/argoproj/argo-workflows/commit/37a8a81df1d7ef3067596199f96974d31b200b88) feat: add ArtifactGC to workflow and template spec. Fixes #8471 (#8482) + * [ae803bba4](https://github.com/argoproj/argo-workflows/commit/ae803bba4f9b0c85f0d0471c22e44eb1c0f8f5f9) fix: Revert controller readiness changes. 
Fixes #8441 (#8454) + * [147ca4637](https://github.com/argoproj/argo-workflows/commit/147ca46376a4d86a09bde689d848396af6750b1e) fix: PodGC works with WorkflowTemplate. Fixes #8448 (#8452) + * [b7aeb6298](https://github.com/argoproj/argo-workflows/commit/b7aeb62982d91036edf5ba942eebeb4b22e30a3d) feat: Add darwin-arm64 binary build. Fixes #8450 (#8451) + * [8c0a957c3](https://github.com/argoproj/argo-workflows/commit/8c0a957c3ef0149f3f616a8baef2eb9a164436c1) fix: Fix bug in entrypoint lookup (#8453) + * [79508cc78](https://github.com/argoproj/argo-workflows/commit/79508cc78bd5b79762719c3b2fbe970981277e1f) chore(deps): bump google.golang.org/api from 0.74.0 to 0.75.0 (#8447) + * [24f9db628](https://github.com/argoproj/argo-workflows/commit/24f9db628090e9dfdfc7d657af80d96c176a47fd) chore(deps): bump github.com/argoproj/pkg from 0.11.0 to 0.12.0 (#8439) + * [e28fb0744](https://github.com/argoproj/argo-workflows/commit/e28fb0744209529cf0f7562c71f7f645db21ba1a) chore(deps): bump dependabot/fetch-metadata from 1.3.0 to 1.3.1 (#8438) + * [72bb11305](https://github.com/argoproj/argo-workflows/commit/72bb1130543a3cc81347fe4fcf3257d8b35cd478) chore(deps): bump github.com/argoproj-labs/argo-dataflow (#8440) + * [230c82652](https://github.com/argoproj/argo-workflows/commit/230c8265246d50a095cc3a697fcd437174731aa8) feat: added support for http as option for artifact upload. Fixes #785 (#8414) + * [4f067ab4b](https://github.com/argoproj/argo-workflows/commit/4f067ab4bcb9ae570b9af11b2abd64d592e1fbbc) chore(deps): bump github.com/prometheus/common from 0.33.0 to 0.34.0 (#8427) + * [a2fd0031e](https://github.com/argoproj/argo-workflows/commit/a2fd0031ef13b63fd65520c615043e2aff89dde8) chore(deps): bump github.com/tidwall/gjson from 1.14.0 to 1.14.1 (#8426) + * [3d1ea426a](https://github.com/argoproj/argo-workflows/commit/3d1ea426a28c65c206752e957bb68a57ee8ed32e) fix: Remove binaries from Windows image. 
Fixes #8417 (#8420) + * [e71fdee07](https://github.com/argoproj/argo-workflows/commit/e71fdee07b8ccd7905752808bffb2283e170077a) Revert "feat: added support for http as an option for artifact upload. Fixes #785 (#8405)" + * [5845efbb9](https://github.com/argoproj/argo-workflows/commit/5845efbb94da8acfb218787846ea10c37fb2eebb) feat: Log result of HTTP requests & artifacts load/saves. Closes #8257 (#8394) + * [d22be825c](https://github.com/argoproj/argo-workflows/commit/d22be825cfb901f1ce59ba3744488cb8e144233b) feat: added support for http as an option for artifact upload. Fixes #785 (#8405) + * [4471b59a5](https://github.com/argoproj/argo-workflows/commit/4471b59a52873ca66d6834a06519407c858f5906) fix: open minio dashboard on different port in quick-start (#8407) + * [f467cc555](https://github.com/argoproj/argo-workflows/commit/f467cc5558bd22330eebfbc352ad4a7607f9fa4c) fix: Daemon step updated 'pod delete' while pod is running (#8399) + * [a648ccdcf](https://github.com/argoproj/argo-workflows/commit/a648ccdcfa3bb4cd5f5684faf921ab9fdab761de) fix: prevent backoff when retryStrategy.limit has been reached. Fixes #7588 (#8090) + * [136ebbc45](https://github.com/argoproj/argo-workflows/commit/136ebbc45b7cba346d7ba72f278624647a6b5a1c) chore(deps): bump github.com/minio/minio-go/v7 from 7.0.23 to 7.0.24 (#8397) + * [73ea7c72c](https://github.com/argoproj/argo-workflows/commit/73ea7c72c99a073dbe3ec0a420e112945916fb94) feat!: Add entrypoint lookup. Fixes #8344 (#8345) + * [283f6b58f](https://github.com/argoproj/argo-workflows/commit/283f6b58f979db1747ca23753d0562a440f95908) fix: Add readiness check to controller. 
Fixes #8283 (#8285) + * [75b533b61](https://github.com/argoproj/argo-workflows/commit/75b533b61eebd00044f2682540f5de15d6be8fbb) chore(deps): bump github.com/spf13/viper from 1.10.1 to 1.11.0 (#8392) + * [b09b9bdfb](https://github.com/argoproj/argo-workflows/commit/b09b9bdfb132c3967b81718bbc3c6e37fb2a3a42) fix: Absolute submodules in git artifacts. Fixes #8377 (#8381) + * [d47081fb4](https://github.com/argoproj/argo-workflows/commit/d47081fb4664d3a26e802a5c3c36798108388f2f) fix: upgrade react-moment from 1.0.0 to 1.1.1 (#8389) + * [010e359e4](https://github.com/argoproj/argo-workflows/commit/010e359e4c29b1af5653c46112ad53ac9b2679be) fix: upgrade react-datepicker from 2.14.1 to 2.16.0 (#8388) + * [0c9d88b44](https://github.com/argoproj/argo-workflows/commit/0c9d88b4429ff59c656e7b78b2160a55b49976ce) fix: upgrade prop-types from 15.7.2 to 15.8.1 (#8387) + * [54fa39c89](https://github.com/argoproj/argo-workflows/commit/54fa39c897d9883cec841450808102d71bd46fa8) fix: Back-off UI retries. Fixes #5697 (#8333) + * [637d14c88](https://github.com/argoproj/argo-workflows/commit/637d14c88f7d12c1c0355d62c2d1d4b03c4934e1) fix: replace `podName` with `nodeId` in `_.primary.swagger.json` (#8385) + * [95323f87d](https://github.com/argoproj/argo-workflows/commit/95323f87d42c9cf878563bfcb11460171906684b) fix: removed error from artifact server 401 response. 
Fixes #8382 (#8383) + * [2d91646aa](https://github.com/argoproj/argo-workflows/commit/2d91646aafede0e5671b07b2ac6eb27a057455b1) fix: upgrade js-yaml from 3.13.1 to 3.14.1 (#8374) + * [54eaed060](https://github.com/argoproj/argo-workflows/commit/54eaed0604393106b4dde3e7d7e6ccb41a42de6b) fix: upgrade cron-parser from 2.16.3 to 2.18.0 (#8373) + * [e97b0e66b](https://github.com/argoproj/argo-workflows/commit/e97b0e66b89f131fe6a12f24c26efbb73e16ef2e) fix: Updating completed node status + * [627597b56](https://github.com/argoproj/argo-workflows/commit/627597b5616f4d22e88b89a6d7017a67b6a4143d) fix: Add auth for SDKs. Fixes #8230 (#8367) + * [55ecfeb7b](https://github.com/argoproj/argo-workflows/commit/55ecfeb7b0e300a5d5cc6027c9212365cdaf4a2b) chore(deps): bump github.com/go-openapi/jsonreference (#8363) + * [163be6d99](https://github.com/argoproj/argo-workflows/commit/163be6d99cc7ee262580196fbfd2cb9e9d7d8833) chore(deps): bump actions/download-artifact from 2 to 3 (#8360) + * [765bafb12](https://github.com/argoproj/argo-workflows/commit/765bafb12de25a7589aa1e2733786e0285290c22) chore(deps): bump actions/upload-artifact from 2 to 3 (#8361) + * [eafa10de8](https://github.com/argoproj/argo-workflows/commit/eafa10de80d31bbcf1ec030d20ecfe879ab2d171) chore(deps): bump actions/setup-go from 2 to 3 (#8362) + * [e9de085d6](https://github.com/argoproj/argo-workflows/commit/e9de085d65a94d4189a54566d99c7177c1a7d735) fix: Erratum in docs. 
Fixes #8342 (#8359) + * [a3d1d07e1](https://github.com/argoproj/argo-workflows/commit/a3d1d07e1cbd19039771c11aa202bd8fd68198e7) fix: upgrade react-chartjs-2 from 2.10.0 to 2.11.2 (#8357) + * [b199cb947](https://github.com/argoproj/argo-workflows/commit/b199cb9474f7b1a3303a12858a2545aa85484d28) fix: upgrade history from 4.7.2 to 4.10.1 (#8356) + * [e40521556](https://github.com/argoproj/argo-workflows/commit/e4052155679a43cf083daf0c1b3fd5d45a5fbe24) fix: upgrade multiple dependencies with Snyk (#8355) + * [8c893bd13](https://github.com/argoproj/argo-workflows/commit/8c893bd13998b7dee09d0dd0c7a292b22509ca20) fix: upgrade com.google.code.gson:gson from 2.8.9 to 2.9.0 (#8354) + * [ee3765643](https://github.com/argoproj/argo-workflows/commit/ee3765643632fa6d8dbfb528a395cbb28608e2e8) feat: add message column to `kubectl get wf` and `argo list`. Fixes #8307 (#8353) + * [ae3881525](https://github.com/argoproj/argo-workflows/commit/ae3881525ce19a029a4798ff294e1b0c982e3268) fix: examples/README.md: overriten => overridden (#8351) + * [242d53596](https://github.com/argoproj/argo-workflows/commit/242d53596a5cf23b4470c2294204030ce11b01c4) fix: Fix response type for artifact service OpenAPI and SDKs. 
Fixes #7781 (#8332) + * [ab21eed52](https://github.com/argoproj/argo-workflows/commit/ab21eed527d15fa2c10272f740bff7c7963891c7) fix: upgrade io.swagger:swagger-annotations from 1.6.2 to 1.6.5 (#8335) + * [f708528fb](https://github.com/argoproj/argo-workflows/commit/f708528fbdfb9adecd8a66df866820eaab9a69ea) fix: upgrade react-monaco-editor from 0.36.0 to 0.47.0 (#8339) + * [3c35bd2f5](https://github.com/argoproj/argo-workflows/commit/3c35bd2f55dfdf641882cb5f9085b0b14f6d4d93) fix: upgrade cronstrue from 1.109.0 to 1.125.0 (#8338) + * [7ee17ddb7](https://github.com/argoproj/argo-workflows/commit/7ee17ddb7804e3f2beae87a8f532b1c0e6d1e520) fix: upgrade com.squareup.okhttp3:logging-interceptor from 4.9.1 to 4.9.3 (#8336) + * [68229e37e](https://github.com/argoproj/argo-workflows/commit/68229e37e295e3861cb7f6621ee3b9c7aabf8d67) added new-line to USERS.md (#8340) + * [94472c0ba](https://github.com/argoproj/argo-workflows/commit/94472c0bad4ed92ac06efb8c28563eba7b5bd1ab) chore(deps): bump cloud.google.com/go/storage from 1.20.0 to 1.22.0 (#8341) + * [aa9ff17d5](https://github.com/argoproj/argo-workflows/commit/aa9ff17d5feaa79aa26d9dc9cf9f67533f886b1c) fix: Remove path traversal CWE-23 (#8331) + * [14a9a1dc5](https://github.com/argoproj/argo-workflows/commit/14a9a1dc57f0d83231a19e76095ebdd4711f2594) fix: ui/package.json & ui/yarn.lock to reduce vulnerabilities (#8328) + * [58052c2b7](https://github.com/argoproj/argo-workflows/commit/58052c2b7b72daa928f8d427055be01cf896ff3e) fix: sdks/java/pom.xml to reduce vulnerabilities (#8327) + * [153540fdd](https://github.com/argoproj/argo-workflows/commit/153540fdd0e3b6f00050550abed67cae16299cbe) feat: Remove binaries from argoexec image. 
Fixes #7486 (#8292) + * [af8077423](https://github.com/argoproj/argo-workflows/commit/af807742343cb1a76926f6a1251466b9af988a47) feat: Always Show Workflow Parameters (#7809) + * [62e0a8ce4](https://github.com/argoproj/argo-workflows/commit/62e0a8ce4e74d2e19f3a9c0fb5e52bd58a6b944b) feat: Remove the PNS executor. Fixes #7804 (#8296) + * [0cdd2b40a](https://github.com/argoproj/argo-workflows/commit/0cdd2b40a8ee2d31476f8078eaedaa16c6827a76) fix: update docker version to address CVE-2022-24921 (#8312) + * [9c901456a](https://github.com/argoproj/argo-workflows/commit/9c901456a44501f11afc2bb1e856f0d0828fd13f) fix: Default value is ignored when loading params from configmap. Fixes #8262 (#8271) + * [9ab0e959a](https://github.com/argoproj/argo-workflows/commit/9ab0e959ac497433bcee2bb9c8d5710f87f1e3ea) fix: reduce number of workflows displayed in UI by default. Fixes #8297 (#8303) + * [13bc01362](https://github.com/argoproj/argo-workflows/commit/13bc013622c3b681bbd3c334dce0eea6870fcfde) fix: fix: git artifact will be checked out even if local file matches name of tracking branch (#8287) + * [65dc0882c](https://github.com/argoproj/argo-workflows/commit/65dc0882c9bb4496f1c4b2e0deb730e775724c82) feat: Fail on invalid config. (#8295) + * [5ac0e314d](https://github.com/argoproj/argo-workflows/commit/5ac0e314da80667e8b3b355c55cf9e1ab9b57b34) fix: `taskresults` owned by pod rather than workflow. (#8284) + * [996655f4f](https://github.com/argoproj/argo-workflows/commit/996655f4f3f03a30bcb82a1bb03f222fd100b8e0) fix: Snyk security recommendations (Golang). Fixes #8288 + * [221d99827](https://github.com/argoproj/argo-workflows/commit/221d9982713ca30c060955bb35b48af3143c3754) fix: Snyk security recommendations (Node). Fixes #8288 + * [b55dead05](https://github.com/argoproj/argo-workflows/commit/b55dead055139d1de33c464beed2b5ef596f5c8e) Revert "build: Enable governance bot. 
Fixes #8256 (#8259)" (#8294) + * [e50ec699c](https://github.com/argoproj/argo-workflows/commit/e50ec699cb33a7b84b0cb3c5b99396fe5365facd) chore(deps): bump google.golang.org/api from 0.73.0 to 0.74.0 (#8281) + * [954a3ee7e](https://github.com/argoproj/argo-workflows/commit/954a3ee7e7cc4f02074c07f7add971ca2be3291e) fix: install.yaml missing crb subject ns (#8280) + * [a3c326fdf](https://github.com/argoproj/argo-workflows/commit/a3c326fdf0d2133d5e78ef71854499f576e7e530) Remove hardcoded namespace in kustomize file #8250 (#8266) + * [b198b334d](https://github.com/argoproj/argo-workflows/commit/b198b334dfdb8e77d2ee51cd05b0716a29ab9169) fix: improve error message when the controller is set `templateReferencing: Secure` (#8277) + * [5598b8c7f](https://github.com/argoproj/argo-workflows/commit/5598b8c7fb5d17015e5c941e09953a74d8931436) feat: add resubmit and retry buttons for archived workflows. Fixes #7908 and #7911 (#8272) + * [6975607fa](https://github.com/argoproj/argo-workflows/commit/6975607fa33bf39e752b9cefcb8cb707a46bc6d4) chore(deps): bump github.com/prometheus/common from 0.32.1 to 0.33.0 (#8274) + * [78f01f2b9](https://github.com/argoproj/argo-workflows/commit/78f01f2b9f24a89db15a119885dfe8eb6420c70d) fix: patch workflow status to workflow (#8265) + * [f48998c07](https://github.com/argoproj/argo-workflows/commit/f48998c070c248688d996e5c8a4fec7601f5ab53) feat: Add a link in the UI for WorkflowTemplate. 
Fixes #4760 (#8208) + * [f02d4b72a](https://github.com/argoproj/argo-workflows/commit/f02d4b72adea9fbd23880c70871f92d66dc183c7) chore(deps): bump github.com/argoproj-labs/argo-dataflow (#8264) + * [48202fe99](https://github.com/argoproj/argo-workflows/commit/48202fe9976ff39731cf73c03578081a10146596) chore(deps): bump dependabot/fetch-metadata from 1.1.1 to 1.3.0 (#8263) + * [f00ec49d6](https://github.com/argoproj/argo-workflows/commit/f00ec49d695bdad108000abcdfd0f82f6af9ca6c) feat!: Refactor/simplify configuration code (#8235) + * [c1f72b662](https://github.com/argoproj/argo-workflows/commit/c1f72b66282012e712e28a715c08dddb1a556c16) feat: add archive retry command to argo CLI. Fixes #7907 (#8229) + * [7a07805b1](https://github.com/argoproj/argo-workflows/commit/7a07805b183d598847bb9323f1009d7e8bbc1ac6) fix: Update argo-server manifests to have read-only root file-system (#8210) + * [0d4b4dc34](https://github.com/argoproj/argo-workflows/commit/0d4b4dc34127a27f7ca6e5c41197f3aaacc79cb8) fix: Panic in Workflow Retry (#8243) + * [61f0decd8](https://github.com/argoproj/argo-workflows/commit/61f0decd873a6a422c3a7159d6023170637338ff) fix: Hook with wftemplateRef (#8242) + * [e232340cc](https://github.com/argoproj/argo-workflows/commit/e232340cc5191c5904afe87f03c80545bb10e430) fix: grep pattern (#8238) + * [1d373c41a](https://github.com/argoproj/argo-workflows/commit/1d373c41afbebcf8de55114582693bcbdc59b342) fix: submodule cloning via git. 
Fixes #7469 (#8225) + * [6ee1b03f9](https://github.com/argoproj/argo-workflows/commit/6ee1b03f9e83c1e129b45a6bc9292a99add6b36e) fix: do not panic when termination-log is not writeable (#8221) + * [cae38894f](https://github.com/argoproj/argo-workflows/commit/cae38894f96b0d33cde54ef9cdee3cda53692a8d) chore(deps): bump github.com/aliyun/aliyun-oss-go-sdk (#8232) + * [e0e45503e](https://github.com/argoproj/argo-workflows/commit/e0e45503e6704b27e3e9ef0ff4a98169f3b072fa) chore(deps): bump peter-evans/create-pull-request from 3 to 4 (#8216) + * [8c77e89fc](https://github.com/argoproj/argo-workflows/commit/8c77e89fc185ff640e1073692dfc7c043037440a) feat: add archive resubmit command to argo CLI. Fixes #7910 (#8166) + * [d8aa46731](https://github.com/argoproj/argo-workflows/commit/d8aa46731c74730ccca1a40187109a63a675618b) fix: Support `--parameters-file` where ARGO_SERVER specified. Fixes #8160 (#8213) + * [d33d391a4](https://github.com/argoproj/argo-workflows/commit/d33d391a4c06c136b6a0964a51c75850323684e6) feat: Add support to auto-mount service account tokens for plugins. (#8176) + * [8a1fbb86e](https://github.com/argoproj/argo-workflows/commit/8a1fbb86e7c83bf14990805166d04d5cb4479ea3) fix: removed deprecated k8sapi executor. Fixes #7802 (#8205) + * [4d5079822](https://github.com/argoproj/argo-workflows/commit/4d5079822da17fd644a99a9e4b27259864ae8c36) chore(deps): bump actions/cache from 2 to 3 (#8206) + * [12cd8bcaa](https://github.com/argoproj/argo-workflows/commit/12cd8bcaa75381b5a9fa65aff03ac13aec706375) fix: requeue not delete the considererd Task flag (#8194) + * [e2b288318](https://github.com/argoproj/argo-workflows/commit/e2b288318b15fa3e3cdc38c3dc7e66774920be8d) fix: Use `latest` image tag when version is `untagged`. Fixes #8188 (#8191) + * [6d6d23d81](https://github.com/argoproj/argo-workflows/commit/6d6d23d8110165331d924e97b01d5e26214c72db) fix: task worker requeue wrong task. 
Fixes #8139 (#8186) + * [41fd07aa4](https://github.com/argoproj/argo-workflows/commit/41fd07aa4f8462d70ad3c2c0481d5e09ae97b612) fix: Update `workflowtaskresult` code have own reconciliation loop. (#8135) + * [051c7b8d2](https://github.com/argoproj/argo-workflows/commit/051c7b8d2baf50b55e8076a1e09e7340551c04c1) fix: pkg/errors is no longer maintained (#7440) + * [fbb43b242](https://github.com/argoproj/argo-workflows/commit/fbb43b2429e45346221a119583aac11df4b5f880) fix: workflow.duration' is not available as a real time metric (#8181) + * [0e707cdf6](https://github.com/argoproj/argo-workflows/commit/0e707cdf69f891c7c7483e2244f5ea930d31b1c5) fix: Authentication for plugins. Fixes #8144 (#8147) + * [d4b1afe6f](https://github.com/argoproj/argo-workflows/commit/d4b1afe6f68afc3061a924186fa09556290ec3e1) feat: add retry API for archived workflows. Fixes #7906 (#7988) + * [e7008eada](https://github.com/argoproj/argo-workflows/commit/e7008eada7a885d80952b5184562a29508323c2a) fix: Correctly order emissary combined output. Fixes #8159 (#8175) + * [9101c4939](https://github.com/argoproj/argo-workflows/commit/9101c49396fe95d62ef3040cd4d330fde9f35554) fix: Add instance ID to `workflowtaskresults` (#8150) + * [2b5e4a1d2](https://github.com/argoproj/argo-workflows/commit/2b5e4a1d2df7877d9b7b7fbedd7136a125a39c8d) feat: Use pinned executor version. (#8165) + * [715f6ced6](https://github.com/argoproj/argo-workflows/commit/715f6ced6f42c0b7b5994bf8d16c561f48025fe8) fix: add /etc/mime.types mapping table (#8171) + * [6d6e08aa8](https://github.com/argoproj/argo-workflows/commit/6d6e08aa826c406a912387ac438ec20428c7623d) fix: Limit workflows to 128KB and return a friendly error message (#8169) + * [057c3346f](https://github.com/argoproj/argo-workflows/commit/057c3346f9f792cf10888320c4297b09f3c11e2e) feat: add TLS config option to HTTP template. 
Fixes #7390 (#7929) + * [013fa2578](https://github.com/argoproj/argo-workflows/commit/013fa2578bc5cace4de754daef04448b30faae32) chore(deps): bump github.com/stretchr/testify from 1.7.0 to 1.7.1 (#8163) + * [ad341c4af](https://github.com/argoproj/argo-workflows/commit/ad341c4af1645c191a5736d91d78a19acc7b2fa7) chore(deps): bump google.golang.org/api from 0.72.0 to 0.73.0 (#8162) + * [5efc9fc99](https://github.com/argoproj/argo-workflows/commit/5efc9fc995ac898672a575b514f8bfc83b220c4c) feat: add mysql options (#8157) + * [cda5737c3](https://github.com/argoproj/argo-workflows/commit/cda5737c37e3ab7c381869d7d820de71285f55a5) chore(deps): bump google.golang.org/api from 0.71.0 to 0.72.0 (#8156) + * [be2dd19a0](https://github.com/argoproj/argo-workflows/commit/be2dd19a0718577348823f1f68b82dbef8d95959) Update USERS.md (#8132) + * [af26ff7ed](https://github.com/argoproj/argo-workflows/commit/af26ff7ed54d4fe508edac34f82fe155f2d54a9d) fix: Remove need for `get pods` from Emissary (#8133) + * [537dd3be6](https://github.com/argoproj/argo-workflows/commit/537dd3be6bf93be37e06d768d9a610038eafb361) feat: Change pod clean-up to use informer. (#8136) + * [1d71fb3c4](https://github.com/argoproj/argo-workflows/commit/1d71fb3c4ebdb2891435ed12257743331ff34436) chore(deps): bump github.com/spf13/cobra from 1.3.0 to 1.4.0 (#8131) + * [972a4e989](https://github.com/argoproj/argo-workflows/commit/972a4e98987296a844a28dce31162d59732e6532) fix(plugins): UX improvements (#8122) + * [437b37647](https://github.com/argoproj/argo-workflows/commit/437b3764783b48a304034cc4291472c6e490689b) feat: add resubmit API for archived workflows. 
Fixes #7909 (#8079) + * [707cf8321](https://github.com/argoproj/argo-workflows/commit/707cf8321ccaf98b4596695fdbfdb04faf9a9487) update kustomize/kubectl installation (#8095) + * [48348247f](https://github.com/argoproj/argo-workflows/commit/48348247f0a0fd949871a9f982d7ee70c39509a1) chore(deps): bump google.golang.org/api from 0.70.0 to 0.71.0 (#8108) + * [765333dc9](https://github.com/argoproj/argo-workflows/commit/765333dc95575608fdf87328c7548c5e349b557d) fix(executor): Retry kubectl on internal transient error (#8092) + * [4d4890454](https://github.com/argoproj/argo-workflows/commit/4d4890454e454acbc86cef039bb6905c63f79e73) fix: Fix the TestStopBehavior flackiness (#8096) + * [6855f4c51](https://github.com/argoproj/argo-workflows/commit/6855f4c51b5bd667599f072ae5ddde48967006f1) fix: pod deleted due to delayed cleanup. Fixes #8022 (#8061) + +### Contributors + + * Aatman + * Adam Eri + * Alex Collins + * BOOK + * Basanth Jenu H B + * Brian Loss + * Cash Williams + * Clemens Lange + * Dakota Lillie + * Dana Pieluszczak + * Dillen Padhiar + * Doğukan + * Ezequiel Muns + * Felix Seidel + * Fernando Luís da Silva + * Gaurav Gupta + * Grzegorz Bielski + * Hao Xin + * Iain Lane + * Isitha Subasinghe + * Iván Sánchez + * JasonZhu + * Jessie Teng + * Juan Luis Cano Rodríguez + * Julie Vogelman + * Kesavan + * LoricAndre + * Manik Sidana + * Marc Abramowitz + * Mark Shields + * Markus Lippert + * Michael Goodness + * Michael Weibel + * Mike Tougeron + * Ming Yu Shi + * Miroslav Boussarov + * Noam Gal + * Philippe Richard + * Rohan Kumar + * Sanjay Tiwari + * Saravanan Balasubramanian + * Shubham Nazare + * Snyk bot + * Soumya Ghosh Dastidar + * Stephanie Palis + * Swarnim Pratap Singh + * Takumi Sue + * Tianchu Zhao + * Timo Pagel + * Tristan Colgate-McFarlane + * Tuan + * Vignesh + * William Van Hevelingen + * Wu Jayway + * Yuan Tang + * alexdittmann + * dependabot[bot] + * hadesy + * ibuder + * kennytrytek + * lijie + * 
mihirpandya-greenops + * momom-i + * shirou + * smile-luobin + * tatsuya-ogawa + * tculp + * ybyang + * İnanç Dokurel + +## v3.3.9 (2022-08-09) + + * [5db53aa0c](https://github.com/argoproj/argo-workflows/commit/5db53aa0ca54e51ca69053e1d3272e37064559d7) Revert "fix: Correct kill command. Fixes #8687 (#8908)" + * [b7b37d5aa](https://github.com/argoproj/argo-workflows/commit/b7b37d5aa2229c09365735fab165b4876c30aa4a) fix: Skip TestRunAsNonRootWithOutputParams + * [e4dca01f1](https://github.com/argoproj/argo-workflows/commit/e4dca01f1a76cefb7cae944ba0c4e54bc0aec427) fix: SignalsSuite test + * [151432f9b](https://github.com/argoproj/argo-workflows/commit/151432f9b754981959e149202d5f4b0617064595) fix: add containerRuntimeExecutor: emissary in ci + * [a3d6a58a7](https://github.com/argoproj/argo-workflows/commit/a3d6a58a71e1603077a4b39c4368d11847d500fb) feat: refactoring e2e test timeouts to support multiple environments. (#8925) + * [f9e2dd21c](https://github.com/argoproj/argo-workflows/commit/f9e2dd21cb09ac90b639be0f97f07da373240202) fix: lint + * [ef3fb421f](https://github.com/argoproj/argo-workflows/commit/ef3fb421f02f96195046ba327beca7b08753530b) fix: Correct kill command. Fixes #8687 (#8908) + * [e85c815a1](https://github.com/argoproj/argo-workflows/commit/e85c815a10fb59cb95cfdf6d2a171cea7c6aec47) fix: set NODE_OPTIONS to no-experimental-fetch to prevent yarn start error (#8802) + * [a19c94bb6](https://github.com/argoproj/argo-workflows/commit/a19c94bb6639540f309883ff0f41b14dd557324b) fix: Omitted task result should also be valid (#8776) + * [15f9d5227](https://github.com/argoproj/argo-workflows/commit/15f9d52270af4bca44553755d095d2dd8badfa14) fix: Fixed podName in killing daemon pods. 
Fixes #8692 (#8708) + * [6ec0ca088](https://github.com/argoproj/argo-workflows/commit/6ec0ca0883cf4e2222176ab413b3318017a30796) fix: open minio dashboard on different port in quick-start (#8407) + * [d874c1a87](https://github.com/argoproj/argo-workflows/commit/d874c1a87b65b300b2a4c93032bd2970d6f91d8f) fix: ui/package.json & ui/yarn.lock to reduce vulnerabilities (#8328) + * [481137c25](https://github.com/argoproj/argo-workflows/commit/481137c259b05c6a5b3c0e3adab1649c2b512364) fix: sdks/java/pom.xml to reduce vulnerabilities (#8327) + * [f54fb5c24](https://github.com/argoproj/argo-workflows/commit/f54fb5c24dd52a64da6d5aad5972a6554e386769) fix: grep pattern (#8238) + * [73334cae9](https://github.com/argoproj/argo-workflows/commit/73334cae9fbaef96b63889e16a3a2f78c725995e) fix: removed deprecated k8sapi executor. Fixes #7802 (#8205) + * [9c9efa67f](https://github.com/argoproj/argo-workflows/commit/9c9efa67f38620eeb08d1a9d2bb612bf14bf33de) fix: retryStrategy.Limit is now read properly for backoff strategy. Fixes #9170. (#9213) + * [69b5f1d79](https://github.com/argoproj/argo-workflows/commit/69b5f1d7945247a9e219b53f12fb8b3eec6e5e52) fix: Add missing Go module entries + +### Contributors + + * Alex Collins + * Dillen Padhiar + * Grzegorz Bielski + * Julie Vogelman + * Kesavan + * Rohan Kumar + * Saravanan Balasubramanian + * Snyk bot + * Takumi Sue + * Yuan Tang + +## v3.3.8 (2022-06-23) + + * [621b0d1a8](https://github.com/argoproj/argo-workflows/commit/621b0d1a8e09634666ebe403ee7b8fc29db1dc4e) fix: check for nil, and add logging to expose root cause of panic in Issue 8968 (#9010) + * [b7c218c0f](https://github.com/argoproj/argo-workflows/commit/b7c218c0f7b3ea0035dc44ccc9e8416f30429d16) feat: log workflow size before hydrating/dehydrating. 
Fixes #8976 (#8988) + +### Contributors + + * Dillen Padhiar + * Julie Vogelman + +## v3.3.7 (2022-06-20) + + * [479763c04](https://github.com/argoproj/argo-workflows/commit/479763c04036db98cd1e9a7a4fc0cc932affb8bf) fix: Skip TestExitHookWithExpression() completely (#8761) + * [a1ba42140](https://github.com/argoproj/argo-workflows/commit/a1ba42140154e757b024fe29c61fc7043c741cee) fix: Template in Lifecycle hook should be optional (#8735) + * [f10d6238d](https://github.com/argoproj/argo-workflows/commit/f10d6238d83b410a461d1860d0bb3c7ae4d74383) fix: Simplify return logic in executeTmplLifeCycleHook (#8736) + * [f2ace043b](https://github.com/argoproj/argo-workflows/commit/f2ace043bb7d050e8d539a781486c9f932bca931) fix: Exit lifecycle hook should respect expression. Fixes #8742 (#8744) + * [8c0b43569](https://github.com/argoproj/argo-workflows/commit/8c0b43569bb3e9c9ace21afcdd89d2cec862939c) fix: long code blocks overflow in ui. Fixes #8916 (#8947) + * [1d26628b8](https://github.com/argoproj/argo-workflows/commit/1d26628b8bc5f5a4d90d7a31b6f8185f280a4538) fix: sync cluster Workflow Template Informer before it's used (#8961) + * [4d9f8f7c8](https://github.com/argoproj/argo-workflows/commit/4d9f8f7c832ff888c11a41dad7a755ef594552c7) fix: Workflow Duration metric shouldn't increase after workflow complete (#8989) + * [72e0c6f00](https://github.com/argoproj/argo-workflows/commit/72e0c6f006120f901f02ea3a6bf8b3e7f639eb48) fix: add nil check for retryStrategy.Limit in deadline check. Fixes #8990 (#8991) + +### Contributors + + * Dakota Lillie + * Dillen Padhiar + * Julie Vogelman + * Saravanan Balasubramanian + * Yuan Tang + +## v3.3.6 (2022-05-25) + + * [2b428be80](https://github.com/argoproj/argo-workflows/commit/2b428be8001a9d5d232dbd52d7e902812107eb28) fix: Handle omitted nodes in DAG enhanced depends logic. 
Fixes #8654 (#8672) + * [7889af614](https://github.com/argoproj/argo-workflows/commit/7889af614c354f4716752942891cbca0a0889df0) fix: close http body. Fixes #8622 (#8624) + * [622c3d594](https://github.com/argoproj/argo-workflows/commit/622c3d59467a2d0449717ab866bd29bbd0469795) fix: Do not log container not found (#8509) + * [7091d8003](https://github.com/argoproj/argo-workflows/commit/7091d800360ad940ec605378324909823911d853) fix: pkg/errors is no longer maintained (#7440) + * [3f4c79fa5](https://github.com/argoproj/argo-workflows/commit/3f4c79fa5f54edcb50b6003178af85c70b5a8a1f) feat: remove size limit of 128kb for workflow templates. Fixes #8789 (#8796) + +### Contributors + + * Alex Collins + * Dillen Padhiar + * Stephanie Palis + * Yuan Tang + * lijie + +## v3.3.5 (2022-05-03) + + * [93cb050e3](https://github.com/argoproj/argo-workflows/commit/93cb050e3933638f0dbe2cdd69630e133b3ad52a) Revert "fix: Pod `OOMKilled` should fail workflow. Fixes #8456 (#8478)" + * [29f3ad844](https://github.com/argoproj/argo-workflows/commit/29f3ad8446ac5f07abda0f6844f3a31a7d50eb23) fix: Added artifact Content-Security-Policy (#8585) + * [a40d27cd7](https://github.com/argoproj/argo-workflows/commit/a40d27cd7535f6d36d5fb8d10cea0226b784fa65) fix: Support memoization on plugin node. Fixes #8553 (#8554) + * [f2b075c29](https://github.com/argoproj/argo-workflows/commit/f2b075c29ee97c95cfebb453b18c0ce5f16a5f04) fix: Pod `OOMKilled` should fail workflow. Fixes #8456 (#8478) + * [ba8c60022](https://github.com/argoproj/argo-workflows/commit/ba8c600224b7147d1832de1bea694fd376570ae9) fix: prevent backoff when retryStrategy.limit has been reached. 
Fixes #7588 (#8090) + * [c17f8c71d](https://github.com/argoproj/argo-workflows/commit/c17f8c71d40d4e34ef0a87dbc95eda005a57dc39) fix: update docker version to address CVE-2022-24921 (#8312) + * [9d0b7aa56](https://github.com/argoproj/argo-workflows/commit/9d0b7aa56cf065bf70c2cfb43f71ea9f92b5f964) fix: Default value is ignored when loading params from configmap. Fixes #8262 (#8271) + * [beab5b6ef](https://github.com/argoproj/argo-workflows/commit/beab5b6ef40a187e90ff23294bb1d9e2db9cb90a) fix: install.yaml missing crb subject ns (#8280) + * [b0d8be2ef](https://github.com/argoproj/argo-workflows/commit/b0d8be2ef3d3c1c96b15aeda572fcd1596fca9f1) fix: requeue not delete the considererd Task flag (#8194) + +### Contributors + + * Alex Collins + * Cash Williams + * Rohan Kumar + * Soumya Ghosh Dastidar + * Wu Jayway + * Yuan Tang + * ybyang + +## v3.3.4 (2022-04-29) + + * [02fb874f5](https://github.com/argoproj/argo-workflows/commit/02fb874f5deb3fc3e16f033c6f60b10e03504d00) feat: add capability to choose params in suspend node.Fixes #8425 (#8472) + * [32b1b3a3d](https://github.com/argoproj/argo-workflows/commit/32b1b3a3d505dea1d42fdeb0104444ca4f5e5795) feat: Add support to auto-mount service account tokens for plugins. (#8176) + +### Contributors + + * Alex Collins + * Basanth Jenu H B + +## v3.3.3 (2022-04-25) + + * [9c08aedc8](https://github.com/argoproj/argo-workflows/commit/9c08aedc880026161d394207acbac0f64db29a53) fix: Revert controller readiness changes. Fixes #8441 (#8454) + * [9854dd3fc](https://github.com/argoproj/argo-workflows/commit/9854dd3fccccd34bf3e4f110412dbd063f3316c2) fix: PodGC works with WorkflowTemplate. Fixes #8448 (#8452) + +### Contributors + + * Alex Collins + +## v3.3.2 (2022-04-20) + + * [35492a170](https://github.com/argoproj/argo-workflows/commit/35492a1700a0f279694cac874b6d9c07a08265d1) fix: Remove binaries from Windows image. 
Fixes #8417 (#8420) + * [bfc3b6cad](https://github.com/argoproj/argo-workflows/commit/bfc3b6cad02c0a38141201d7f77e14e3f0e637a4) fix: Skip TestRunAsNonRootWithOutputParams + * [1c34f9801](https://github.com/argoproj/argo-workflows/commit/1c34f9801b502d1566064726145ce5d68124b213) fix: go.sum + * [be35b54b0](https://github.com/argoproj/argo-workflows/commit/be35b54b00e44339f8dcb63d0411bc80f8983764) fix: create cache lint + * [017a31518](https://github.com/argoproj/argo-workflows/commit/017a3151837ac05cca1b2425a8395d547d86ed09) fix: create cache lint + * [20d601b3d](https://github.com/argoproj/argo-workflows/commit/20d601b3dd2ebef102a1a610e4dbef6924f842ff) fix: create cache lint + * [d8f28586f](https://github.com/argoproj/argo-workflows/commit/d8f28586f82b1bdb9e43446bd1792b3b01b2928a) fix: empty push + * [f41d94e91](https://github.com/argoproj/argo-workflows/commit/f41d94e91648961dfdc6e8536768012569dcd28f) fix: codegen + * [ce195dd52](https://github.com/argoproj/argo-workflows/commit/ce195dd521e195df4edd96bcd27fd950f23ff611) fix: Add auth for SDKs. Fixes #8230 (#8367) + * [00c960619](https://github.com/argoproj/argo-workflows/commit/00c9606197c30c138714b27ca5624dd0272c662d) fix: unittest + * [a0148c1b3](https://github.com/argoproj/argo-workflows/commit/a0148c1b32fef820a0cde5a5fed1975abedb7f82) chore(deps): bump github.com/minio/minio-go/v7 from 7.0.23 to 7.0.24 (#8397) + * [5207d287b](https://github.com/argoproj/argo-workflows/commit/5207d287b5657d9049edd1b67c2b681a13c40420) fix: codegen + * [e68e06c34](https://github.com/argoproj/argo-workflows/commit/e68e06c3453453d70a76c08b1a6cb00635b2d941) fix: Daemon step updated 'pod delete' while pod is running (#8399) + * [b9f8b3587](https://github.com/argoproj/argo-workflows/commit/b9f8b3587345eda47edfaebb7bc18ea1193d430b) fix: Add readiness check to controller. 
Fixes #8283 (#8285) + * [ed26dc0a0](https://github.com/argoproj/argo-workflows/commit/ed26dc0a09bc38ac2366124621ea98918b95b34a) fix: Absolute submodules in git artifacts. Fixes #8377 (#8381) + * [6f77c0af0](https://github.com/argoproj/argo-workflows/commit/6f77c0af03545611dfef0222bcf5f5f76f30f4d4) fix: Back-off UI retries. Fixes #5697 (#8333) + * [8d5c2f2a3](https://github.com/argoproj/argo-workflows/commit/8d5c2f2a39033972e1f389029f5c08290aa19ccd) fix: replace `podName` with `nodeId` in `_.primary.swagger.json` (#8385) + * [a327edd5a](https://github.com/argoproj/argo-workflows/commit/a327edd5a5c5e7aff4c64131f1a9c3d9e5d9d3eb) fix: removed error from artifact server 401 response. Fixes #8382 (#8383) + * [502cf6d88](https://github.com/argoproj/argo-workflows/commit/502cf6d882ac51fd80950c2f25f90e491b3f13f6) fix: Updating complated node status + * [0a0956864](https://github.com/argoproj/argo-workflows/commit/0a09568648199fcc5a8997e4f5eed55c40bfa974) fix: Fix response type for artifact service OpenAPI and SDKs. Fixes #7781 (#8332) + * [a3bce2aaf](https://github.com/argoproj/argo-workflows/commit/a3bce2aaf94b07a73c3a7a4c9205872be7dc360c) fix: patch workflow status to workflow (#8265) + * [c5174fbee](https://github.com/argoproj/argo-workflows/commit/c5174fbeec69aa0ea4dbad8b239b7e46c76e5873) fix: Update argo-server manifests to have read-only root file-system (#8210) + * [ba795e656](https://github.com/argoproj/argo-workflows/commit/ba795e6562902d66adadd15554f791bc85b779a8) fix: Panic in Workflow Retry (#8243) + * [c95de6bb2](https://github.com/argoproj/argo-workflows/commit/c95de6bb25b8d7294f8f48490fccb2ba95d96f9b) fix: Hook with wftemplateRef (#8242) + * [187c21fa7](https://github.com/argoproj/argo-workflows/commit/187c21fa7b45d87c55dd71f247e439f6c9b776b3) fix: submodule cloning via git. 
Fixes #7469 (#8225) + * [289d44b9b](https://github.com/argoproj/argo-workflows/commit/289d44b9b0234baf24f1384a0b6743ca10bfb060) fix: do not panic when termination-log is not writeable (#8221) + * [c10ba38a8](https://github.com/argoproj/argo-workflows/commit/c10ba38a86eb2ba4e70812b172a02bea901073f1) fix: Support `--parameters-file` where ARGO_SERVER specified. Fixes #8160 (#8213) + * [239781109](https://github.com/argoproj/argo-workflows/commit/239781109e62e405a6596e88c706df21cf152a6e) fix: Use `latest` image tag when version is `untagged`. Fixes #8188 (#8191) + * [7d00fa9d9](https://github.com/argoproj/argo-workflows/commit/7d00fa9d94427e5b30bea3c3bd7fecd673b95870) fix: task worker requeue wrong task. Fixes #8139 (#8186) + * [ed6907f1c](https://github.com/argoproj/argo-workflows/commit/ed6907f1cafb1cd53a877c1bdebbf0497ab53278) fix: Authentication for plugins. Fixes #8144 (#8147) + * [5ff9bc9aa](https://github.com/argoproj/argo-workflows/commit/5ff9bc9aaba80db7833d513321bb6ae2d305f1f9) fix: Correctly order emissary combined output. Fixes #8159 (#8175) + * [918c27311](https://github.com/argoproj/argo-workflows/commit/918c273113ed14349c8df87d727a5b8070d301a1) fix: Add instance ID to `workflowtaskresults` (#8150) + * [af0cfab8f](https://github.com/argoproj/argo-workflows/commit/af0cfab8f3bd5b62ebe967381fed0bccbd7c7ada) fix: Update `workflowtaskresult` code have own reconciliation loop. (#8135) + * [3a425ec5a](https://github.com/argoproj/argo-workflows/commit/3a425ec5a1010e9b9ac2aac054095e5e9d240693) fix: Authentication for plugins. Fixes #8144 (#8147) + * [cdd1633e4](https://github.com/argoproj/argo-workflows/commit/cdd1633e428d8596467e7673d0d6d5c50ade41af) fix: Correctly order emissary combined output. 
Fixes #8159 (#8175) + * [22c203fc4](https://github.com/argoproj/argo-workflows/commit/22c203fc44a005e4207fff5b8ce7f4854ed0bf78) fix: Add instance ID to `workflowtaskresults` (#8150) + * [79a9a5b6f](https://github.com/argoproj/argo-workflows/commit/79a9a5b6fcca7953e740a5e171d3bc7f08953854) fix: improve error message when the controller is set `templateReferencing: Secure` (#8277) + * [7e880216a](https://github.com/argoproj/argo-workflows/commit/7e880216a1bf384d15d836877d170bbeea19814d) fix: `taskresults` owned by pod rather than workflow. (#8284) + * [347583132](https://github.com/argoproj/argo-workflows/commit/347583132916fd2f87b3885381fe86281ea3ec33) fix: fix: git artifact will be checked out even if local file matches name of tracking branch (#8287) + * [aa460b9ad](https://github.com/argoproj/argo-workflows/commit/aa460b9adc40ed4854dc373d0d755e6d36b633f8) fix: reduce number of workflows displayed in UI by default. Fixes #8297 (#8303) + +### Contributors + + * Aatman + * Alex Collins + * Dillen Padhiar + * Markus Lippert + * Michael Weibel + * Rohan Kumar + * Saravanan Balasubramanian + * Takumi Sue + * Tristan Colgate-McFarlane + * Wu Jayway + * dependabot[bot] + ## v3.3.1 (2022-03-18) * [76ff748d4](https://github.com/argoproj/argo-workflows/commit/76ff748d41c67e1a38ace1352ca3bab8d7ec8a39) feat: add TLS config option to HTTP template. Fixes #7390 (#7929) @@ -444,6 +1172,22 @@ * zorulo * 大雄 +## v3.2.11 (2022-05-03) + + * [8faf269a7](https://github.com/argoproj/argo-workflows/commit/8faf269a795c0c9cc251152f9e4db4cd49234e52) fix: Remove binaries from Windows image. 
Fixes #8417 (#8420) + +### Contributors + + * Markus Lippert + +## v3.2.10 (2022-05-03) + + * [877216e21](https://github.com/argoproj/argo-workflows/commit/877216e2159f07bfb27aa1991aa249bc2e9a250c) fix: Added artifact Content-Security-Policy (#8585) + +### Contributors + + * Alex Collins + ## v3.2.9 (2022-03-02) * [ce91d7b1d](https://github.com/argoproj/argo-workflows/commit/ce91d7b1d0115d5c73f6472dca03ddf5cc2c98f4) fix(controller): fix pod stuck in running when using podSpecPatch and emissary (#7407) diff --git a/Dockerfile b/Dockerfile index 76fa22175e07..7ad207b20e97 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,12 +1,6 @@ #syntax=docker/dockerfile:1.2 -ARG DOCKER_CHANNEL=stable -ARG DOCKER_VERSION=20.10.12 -# NOTE: kubectl version should be one minor version less than https://storage.googleapis.com/kubernetes-release/release/stable.txt -ARG KUBECTL_VERSION=1.22.3 -ARG JQ_VERSION=1.6 - -FROM golang:1.17 as builder +FROM golang:1.18 as builder RUN apt-get update && apt-get --no-install-recommends install -y \ git \ @@ -15,9 +9,7 @@ RUN apt-get update && apt-get --no-install-recommends install -y \ apt-transport-https \ ca-certificates \ wget \ - gcc \ - libcap2-bin \ - zip && \ + gcc && \ apt-get clean \ && rm -rf \ /var/lib/apt/lists/* \ @@ -37,33 +29,6 @@ RUN go mod download COPY . . 
-#################################################################################################### - -FROM alpine:3 as argoexec-base - -ARG DOCKER_CHANNEL -ARG DOCKER_VERSION -ARG KUBECTL_VERSION - -RUN apk --no-cache add curl procps git tar libcap jq - -COPY hack/arch.sh hack/os.sh /bin/ - -RUN if [ $(arch.sh) = ppc64le ] || [ $(arch.sh) = s390x ]; then \ - curl -o docker.tgz https://download.docker.com/$(os.sh)/static/${DOCKER_CHANNEL}/$(uname -m)/docker-18.06.3-ce.tgz; \ - else \ - curl -o docker.tgz https://download.docker.com/$(os.sh)/static/${DOCKER_CHANNEL}/$(uname -m)/docker-${DOCKER_VERSION}.tgz; \ - fi && \ - tar --extract --file docker.tgz --strip-components 1 --directory /usr/local/bin/ && \ - rm docker.tgz -RUN curl -o /usr/local/bin/kubectl https://storage.googleapis.com/kubernetes-release/release/v${KUBECTL_VERSION}/bin/$(os.sh)/$(arch.sh)/kubectl && \ - chmod +x /usr/local/bin/kubectl -RUN rm /bin/arch.sh /bin/os.sh - -COPY hack/ssh_known_hosts /etc/ssh/ -COPY hack/nsswitch.conf /etc/ - - #################################################################################################### FROM node:16 as argo-ui @@ -81,13 +46,21 @@ RUN NODE_OPTIONS="--max-old-space-size=2048" JOBS=max yarn --cwd ui build FROM builder as argoexec-build +COPY hack/arch.sh hack/os.sh /bin/ + +# NOTE: kubectl version should be one minor version less than https://storage.googleapis.com/kubernetes-release/release/stable.txt +RUN curl -o /usr/local/bin/kubectl https://storage.googleapis.com/kubernetes-release/release/v1.22.3/bin/$(os.sh)/$(arch.sh)/kubectl && \ + chmod +x /usr/local/bin/kubectl + +RUN curl -o /usr/local/bin/jq https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 && \ + chmod +x /usr/local/bin/jq + # Tell git to forget about all of the files that were not included because of .dockerignore in order to ensure that # the git state is "clean" even though said .dockerignore files are not present RUN cat .dockerignore >> .gitignore RUN git 
status --porcelain | cut -c4- | xargs git update-index --skip-worktree RUN --mount=type=cache,target=/root/.cache/go-build make dist/argoexec -RUN setcap CAP_SYS_PTRACE,CAP_SYS_CHROOT+ei dist/argoexec #################################################################################################### @@ -119,27 +92,32 @@ RUN --mount=type=cache,target=/root/.cache/go-build make dist/argo #################################################################################################### -FROM argoexec-base as argoexec +FROM gcr.io/distroless/static as argoexec -COPY --from=argoexec-build /go/src/github.com/argoproj/argo-workflows/dist/argoexec /usr/local/bin/ +COPY --from=argoexec-build /usr/local/bin/kubectl /bin/ +COPY --from=argoexec-build /usr/local/bin/jq /bin/ +COPY --from=argoexec-build /go/src/github.com/argoproj/argo-workflows/dist/argoexec /bin/ COPY --from=argoexec-build /etc/mime.types /etc/mime.types +COPY hack/ssh_known_hosts /etc/ssh/ +COPY hack/nsswitch.conf /etc/ ENTRYPOINT [ "argoexec" ] #################################################################################################### -FROM scratch as workflow-controller +FROM gcr.io/distroless/static as workflow-controller USER 8737 -COPY --chown=8737 --from=workflow-controller-build /usr/share/zoneinfo /usr/share/zoneinfo +COPY hack/ssh_known_hosts /etc/ssh/ +COPY hack/nsswitch.conf /etc/ COPY --chown=8737 --from=workflow-controller-build /go/src/github.com/argoproj/argo-workflows/dist/workflow-controller /bin/ ENTRYPOINT [ "workflow-controller" ] #################################################################################################### -FROM scratch as argocli +FROM gcr.io/distroless/static as argocli USER 8737 @@ -147,7 +125,6 @@ WORKDIR /home/argo COPY hack/ssh_known_hosts /etc/ssh/ COPY hack/nsswitch.conf /etc/ -COPY --from=argocli-build /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ COPY --from=argocli-build /go/src/github.com/argoproj/argo-workflows/dist/argo /bin/ 
ENTRYPOINT [ "argo" ] diff --git a/Dockerfile.windows b/Dockerfile.windows index 2840066c04c7..fcdc1aa06e55 100644 --- a/Dockerfile.windows +++ b/Dockerfile.windows @@ -8,7 +8,7 @@ ARG IMAGE_OS_VERSION=1809 # had issues with official golange image for windows so I'm using plain servercore FROM mcr.microsoft.com/windows/servercore:${IMAGE_OS_VERSION} as builder -ENV GOLANG_VERSION=1.17 +ENV GOLANG_VERSION=1.18 SHELL ["powershell", "-Command"] # install chocolatey package manager @@ -19,7 +19,7 @@ RUN iex ((new-object net.webclient).DownloadString('https://chocolatey.org/insta # install golang, dep and other tools RUN choco install golang --version=$env:GOLANG_VERSION ; \ - choco install make dep docker-cli git.portable 7zip.portable + choco install make dep git.portable 7zip.portable #################################################################################################### # argoexec-base @@ -36,13 +36,11 @@ RUN mkdir C:\app && \ curl -L -o C:\app\kubectl.exe "https://storage.googleapis.com/kubernetes-release/release/v%KUBECTL_VERSION%/bin/windows/amd64/kubectl.exe" && \ curl -L -o C:\app\jq.exe "https://github.com/stedolan/jq/releases/download/jq-%JQ_VERSION%/jq-win64.exe" -COPY --from=builder C:/ProgramData/chocolatey/lib/docker-cli/tools/docker/docker.exe C:/app/docker.exe -COPY --from=builder C:/tools/git C:/app/git COPY --from=builder C:/ProgramData/chocolatey/lib/7zip.portable/tools/7z-extra/x64/7za.exe C:/app/7za.exe # add binaries to path USER Administrator -RUN SETX /m path C:\app;C:\app\git\bin;%path% +RUN SETX /m path C:\app;%path% #################################################################################################### # Argo Build stage which performs the actual build of Argo binaries diff --git a/Makefile b/Makefile index 2eaecfc8da20..c032c0b414c5 100644 --- a/Makefile +++ b/Makefile @@ -17,6 +17,7 @@ SRC := $(GOPATH)/src/github.com/argoproj/argo-workflows GREP_LOGS := "" + # docker image publishing options IMAGE_NAMESPACE 
?= quay.io/argoproj DEV_IMAGE ?= $(shell [ `uname -s` = Darwin ] && echo true || echo false) @@ -28,6 +29,12 @@ K3D_CLUSTER_NAME ?= k3s-default KUBE_NAMESPACE ?= argo MANAGED_NAMESPACE ?= $(KUBE_NAMESPACE) +# Timeout for wait conditions +E2E_WAIT_TIMEOUT ?= 1m + +E2E_PARALLEL ?= 20 +E2E_SUITE_TIMEOUT ?= 15m + VERSION := latest DOCKER_PUSH := false @@ -44,10 +51,15 @@ else STATIC_FILES ?= $(shell [ $(DEV_BRANCH) = true ] && echo false || echo true) endif -UI ?= false +# start the Controller +CTRL ?= true +# tail logs +LOGS ?= $(CTRL) +# start the UI +UI ?= $(shell [ $(CTRL) = true ] && echo false || echo true) # start the Argo Server API ?= $(UI) -GOTEST ?= go test -v +GOTEST ?= go test -v -p 20 PROFILE ?= minimal PLUGINS ?= $(shell [ $PROFILE = plugins ] && echo false || echo true) # by keeping this short we speed up the tests @@ -58,6 +70,8 @@ AUTH_MODE := hybrid ifeq ($(PROFILE),sso) AUTH_MODE := sso endif +# whether or not to start the Azurite test service for Azure Blob Storage +AZURE := false # Which mode to run in: # * `local` run the workflow–controller and argo-server as single replicas on the local machine (default) @@ -80,7 +94,7 @@ ALWAYS_OFFLOAD_NODE_STATUS := false $(info GIT_COMMIT=$(GIT_COMMIT) GIT_BRANCH=$(GIT_BRANCH) GIT_TAG=$(GIT_TAG) GIT_TREE_STATE=$(GIT_TREE_STATE) RELEASE_TAG=$(RELEASE_TAG) DEV_BRANCH=$(DEV_BRANCH) VERSION=$(VERSION)) $(info KUBECTX=$(KUBECTX) DOCKER_DESKTOP=$(DOCKER_DESKTOP) K3D=$(K3D) DOCKER_PUSH=$(DOCKER_PUSH)) -$(info RUN_MODE=$(RUN_MODE) PROFILE=$(PROFILE) AUTH_MODE=$(AUTH_MODE) SECURE=$(SECURE) STATIC_FILES=$(STATIC_FILES) ALWAYS_OFFLOAD_NODE_STATUS=$(ALWAYS_OFFLOAD_NODE_STATUS) UPPERIO_DB_DEBUG=$(UPPERIO_DB_DEBUG) LOG_LEVEL=$(LOG_LEVEL) NAMESPACED=$(NAMESPACED)) +$(info RUN_MODE=$(RUN_MODE) PROFILE=$(PROFILE) AUTH_MODE=$(AUTH_MODE) SECURE=$(SECURE) STATIC_FILES=$(STATIC_FILES) ALWAYS_OFFLOAD_NODE_STATUS=$(ALWAYS_OFFLOAD_NODE_STATUS) UPPERIO_DB_DEBUG=$(UPPERIO_DB_DEBUG) LOG_LEVEL=$(LOG_LEVEL) NAMESPACED=$(NAMESPACED) 
AZURE=$(AZURE)) override LDFLAGS += \ -X github.com/argoproj/argo-workflows/v3.version=$(VERSION) \ @@ -100,7 +114,6 @@ endif ARGOEXEC_PKGS := $(shell echo cmd/argoexec && go list -f '{{ join .Deps "\n" }}' ./cmd/argoexec/ | grep 'argoproj/argo-workflows/v3/' | cut -c 39-) CLI_PKGS := $(shell echo cmd/argo && go list -f '{{ join .Deps "\n" }}' ./cmd/argo/ | grep 'argoproj/argo-workflows/v3/' | cut -c 39-) CONTROLLER_PKGS := $(shell echo cmd/workflow-controller && go list -f '{{ join .Deps "\n" }}' ./cmd/workflow-controller/ | grep 'argoproj/argo-workflows/v3/' | cut -c 39-) -E2E_EXECUTOR ?= emissary TYPES := $(shell find pkg/apis/workflow/v1alpha1 -type f -name '*.go' -not -name openapi_generated.go -not -name '*generated*' -not -name '*test.go') CRDS := $(shell find manifests/base/crds -type f -name 'argoproj.io_*.yaml') SWAGGER_FILES := pkg/apiclient/_.primary.swagger.json \ @@ -110,12 +123,11 @@ SWAGGER_FILES := pkg/apiclient/_.primary.swagger.json \ pkg/apiclient/event/event.swagger.json \ pkg/apiclient/eventsource/eventsource.swagger.json \ pkg/apiclient/info/info.swagger.json \ - pkg/apiclient/pipeline/pipeline.swagger.json \ pkg/apiclient/sensor/sensor.swagger.json \ pkg/apiclient/workflow/workflow.swagger.json \ pkg/apiclient/workflowarchive/workflow-archive.swagger.json \ pkg/apiclient/workflowtemplate/workflow-template.swagger.json -PROTO_BINARIES := $(GOPATH)/bin/protoc-gen-gogo $(GOPATH)/bin/protoc-gen-gogofast $(GOPATH)/bin/goimports $(GOPATH)/bin/protoc-gen-grpc-gateway $(GOPATH)/bin/protoc-gen-swagger +PROTO_BINARIES := $(GOPATH)/bin/protoc-gen-gogo $(GOPATH)/bin/protoc-gen-gogofast $(GOPATH)/bin/goimports $(GOPATH)/bin/protoc-gen-grpc-gateway $(GOPATH)/bin/protoc-gen-swagger /usr/local/bin/clang-format # protoc,my.proto define protoc @@ -136,12 +148,6 @@ define protoc endef -.PHONY: build -build: clis images - -.PHONY: images -images: argocli-image argoexec-image workflow-controller-image - # cli .PHONY: cli @@ -167,36 +173,37 @@ 
server/static/files.go: endif dist/argo-linux-amd64: GOARGS = GOOS=linux GOARCH=amd64 -dist/argo-darwin-amd64: GOARGS = GOOS=darwin GOARCH=amd64 -dist/argo-windows-amd64: GOARGS = GOOS=windows GOARCH=amd64 dist/argo-linux-arm64: GOARGS = GOOS=linux GOARCH=arm64 dist/argo-linux-ppc64le: GOARGS = GOOS=linux GOARCH=ppc64le dist/argo-linux-s390x: GOARGS = GOOS=linux GOARCH=s390x +dist/argo-darwin-amd64: GOARGS = GOOS=darwin GOARCH=amd64 +dist/argo-darwin-arm64: GOARGS = GOOS=darwin GOARCH=arm64 +dist/argo-windows-amd64: GOARGS = GOOS=windows GOARCH=amd64 dist/argo-windows-%.gz: dist/argo-windows-% gzip --force --keep dist/argo-windows-$*.exe dist/argo-windows-%: server/static/files.go $(CLI_PKGS) go.sum - CGO_ENABLED=0 $(GOARGS) go build -v -ldflags '${LDFLAGS} -extldflags -static' -o $@.exe ./cmd/argo + CGO_ENABLED=0 $(GOARGS) go build -v -gcflags '${GCFLAGS}' -ldflags '${LDFLAGS} -extldflags -static' -o $@.exe ./cmd/argo dist/argo-%.gz: dist/argo-% gzip --force --keep dist/argo-$* dist/argo-%: server/static/files.go $(CLI_PKGS) go.sum - CGO_ENABLED=0 $(GOARGS) go build -v -ldflags '${LDFLAGS} -extldflags -static' -o $@ ./cmd/argo + CGO_ENABLED=0 $(GOARGS) go build -v -gcflags '${GCFLAGS}' -ldflags '${LDFLAGS} -extldflags -static' -o $@ ./cmd/argo dist/argo: server/static/files.go $(CLI_PKGS) go.sum ifeq ($(shell uname -s),Darwin) # if local, then build fast: use CGO and dynamic-linking - go build -v -ldflags '${LDFLAGS}' -o $@ ./cmd/argo + go build -v -gcflags '${GCFLAGS}' -ldflags '${LDFLAGS}' -o $@ ./cmd/argo else - CGO_ENABLED=0 go build -v -ldflags '${LDFLAGS} -extldflags -static' -o $@ ./cmd/argo + CGO_ENABLED=0 go build -gcflags '${GCFLAGS}' -v -ldflags '${LDFLAGS} -extldflags -static' -o $@ ./cmd/argo endif argocli-image: .PHONY: clis -clis: dist/argo-linux-amd64.gz dist/argo-linux-arm64.gz dist/argo-linux-ppc64le.gz dist/argo-linux-s390x.gz dist/argo-darwin-amd64.gz dist/argo-windows-amd64.gz +clis: dist/argo-linux-amd64.gz dist/argo-linux-arm64.gz 
dist/argo-linux-ppc64le.gz dist/argo-linux-s390x.gz dist/argo-darwin-amd64.gz dist/argo-darwin-arm64.gz dist/argo-windows-amd64.gz # controller @@ -206,9 +213,9 @@ controller: dist/workflow-controller dist/workflow-controller: $(CONTROLLER_PKGS) go.sum ifeq ($(shell uname -s),Darwin) # if local, then build fast: use CGO and dynamic-linking - go build -v -ldflags '${LDFLAGS}' -o $@ ./cmd/workflow-controller + go build -gcflags '${GCFLAGS}' -v -ldflags '${LDFLAGS}' -o $@ ./cmd/workflow-controller else - CGO_ENABLED=0 go build -v -ldflags '${LDFLAGS} -extldflags -static' -o $@ ./cmd/workflow-controller + CGO_ENABLED=0 go build -gcflags '${GCFLAGS}' -v -ldflags '${LDFLAGS} -extldflags -static' -o $@ ./cmd/workflow-controller endif workflow-controller-image: @@ -217,9 +224,9 @@ workflow-controller-image: dist/argoexec: $(ARGOEXEC_PKGS) go.sum ifeq ($(shell uname -s),Darwin) - CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -v -ldflags '${LDFLAGS} -extldflags -static' -o $@ ./cmd/argoexec + CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -gcflags '${GCFLAGS}' -v -ldflags '${LDFLAGS} -extldflags -static' -o $@ ./cmd/argoexec else - CGO_ENABLED=0 go build -v -ldflags '${LDFLAGS} -extldflags -static' -o $@ ./cmd/argoexec + CGO_ENABLED=0 go build -v -gcflags '${GCFLAGS}' -ldflags '${LDFLAGS} -extldflags -static' -o $@ ./cmd/argoexec endif argoexec-image: @@ -236,7 +243,8 @@ argoexec-image: if [ $(DOCKER_PUSH) = true ] && [ $(IMAGE_NAMESPACE) != argoproj ] ; then docker push $(IMAGE_NAMESPACE)/$*:$(VERSION) ; fi .PHONY: codegen -codegen: types swagger docs manifests +codegen: types swagger manifests $(GOPATH)/bin/mockery docs/fields.md docs/cli/argo.md + go generate ./... 
make --directory sdks/java generate make --directory sdks/python generate @@ -258,7 +266,6 @@ swagger: \ pkg/apiclient/eventsource/eventsource.swagger.json \ pkg/apiclient/info/info.swagger.json \ pkg/apiclient/sensor/sensor.swagger.json \ - pkg/apiclient/pipeline/pipeline.swagger.json \ pkg/apiclient/workflow/workflow.swagger.json \ pkg/apiclient/workflowarchive/workflow-archive.swagger.json \ pkg/apiclient/workflowtemplate/workflow-template.swagger.json \ @@ -267,19 +274,9 @@ swagger: \ api/openapi-spec/swagger.json \ api/jsonschema/schema.json -.PHONY: docs -docs: \ - docs/fields.md \ - docs/cli/argo.md \ - $(GOPATH)/bin/mockery - rm -Rf vendor v3 - go mod tidy - # `go generate ./...` takes around 10s, so we only run on specific packages. - go generate ./persist/sqldb ./pkg/plugins ./pkg/apiclient/workflow ./server/auth ./server/auth/sso ./workflow/executor - ./hack/check-env-doc.sh $(GOPATH)/bin/mockery: - go install github.com/vektra/mockery/v2@v2.9.4 + go install github.com/vektra/mockery/v2@v2.10.0 $(GOPATH)/bin/controller-gen: go install sigs.k8s.io/controller-tools/cmd/controller-gen@v0.4.1 $(GOPATH)/bin/go-to-protobuf: @@ -301,9 +298,19 @@ $(GOPATH)/bin/swagger: $(GOPATH)/bin/goimports: go install golang.org/x/tools/cmd/goimports@v0.1.7 +/usr/local/bin/clang-format: +ifeq ($(shell uname),Darwin) + brew install clang-format +else + sudo apt-get install clang-format +endif + pkg/apis/workflow/v1alpha1/generated.proto: $(GOPATH)/bin/go-to-protobuf $(PROTO_BINARIES) $(TYPES) $(GOPATH)/src/github.com/gogo/protobuf # These files are generated on a v3/ folder by the tool. Link them to the root folder [ -e ./v3 ] || ln -s . v3 + # Format proto files. Formatting changes generated code, so we do it here, rather that at lint time. + # Why clang-format? Google uses it. 
+ find pkg/apiclient -name '*.proto'|xargs clang-format -i $(GOPATH)/bin/go-to-protobuf \ --go-header-file=./hack/custom-boilerplate.go.txt \ --packages=github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1 \ @@ -333,9 +340,6 @@ pkg/apiclient/info/info.swagger.json: $(PROTO_BINARIES) $(TYPES) pkg/apiclient/i pkg/apiclient/sensor/sensor.swagger.json: $(PROTO_BINARIES) $(TYPES) pkg/apiclient/sensor/sensor.proto $(call protoc,pkg/apiclient/sensor/sensor.proto) -pkg/apiclient/pipeline/pipeline.swagger.json: $(PROTO_BINARIES) $(TYPES) pkg/apiclient/pipeline/pipeline.proto - $(call protoc,pkg/apiclient/pipeline/pipeline.proto) - pkg/apiclient/workflow/workflow.swagger.json: $(PROTO_BINARIES) $(TYPES) pkg/apiclient/workflow/workflow.proto $(call protoc,pkg/apiclient/workflow/workflow.proto) @@ -349,6 +353,7 @@ pkg/apiclient/workflowtemplate/workflow-template.swagger.json: $(PROTO_BINARIES) manifests/base/crds/full/argoproj.io_workflows.yaml: $(GOPATH)/bin/controller-gen $(TYPES) ./hack/crdgen.sh ./hack/crds.go ./hack/crdgen.sh +.PHONY: manifests manifests: \ manifests/install.yaml \ manifests/namespace-install.yaml \ @@ -361,14 +366,23 @@ manifests: \ dist/manifests/quick-start-mysql.yaml \ dist/manifests/quick-start-postgres.yaml +.PHONY: manifests/install.yaml manifests/install.yaml: /dev/null kubectl kustomize --load-restrictor=LoadRestrictionsNone manifests/cluster-install | ./hack/auto-gen-msg.sh > manifests/install.yaml + +.PHONY: manifests/namespace-install.yaml manifests/namespace-install.yaml: /dev/null kubectl kustomize --load-restrictor=LoadRestrictionsNone manifests/namespace-install | ./hack/auto-gen-msg.sh > manifests/namespace-install.yaml + +.PHONY: manifests/quick-start-minimal.yaml manifests/quick-start-minimal.yaml: /dev/null kubectl kustomize --load-restrictor=LoadRestrictionsNone manifests/quick-start/minimal | ./hack/auto-gen-msg.sh > manifests/quick-start-minimal.yaml + +.PHONY: manifests/quick-start-mysql.yaml 
manifests/quick-start-mysql.yaml: /dev/null kubectl kustomize --load-restrictor=LoadRestrictionsNone manifests/quick-start/mysql | ./hack/auto-gen-msg.sh > manifests/quick-start-mysql.yaml + +.PHONY: manifests/quick-start-postgres.yaml manifests/quick-start-postgres.yaml: /dev/null kubectl kustomize --load-restrictor=LoadRestrictionsNone manifests/quick-start/postgres | ./hack/auto-gen-msg.sh > manifests/quick-start-postgres.yaml @@ -379,33 +393,37 @@ dist/manifests/%: manifests/% # lint/test/etc $(GOPATH)/bin/golangci-lint: - curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b `go env GOPATH`/bin v1.42.0 + curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b `go env GOPATH`/bin v1.47.1 .PHONY: lint lint: server/static/files.go $(GOPATH)/bin/golangci-lint rm -Rf v3 vendor + # If you're using `woc.wf.Spec` or `woc.execWf.Status` your code probably won't work with WorkflowTemplate. + # * Change `woc.wf.Spec` to `woc.execWf.Spec`. + # * Change `woc.execWf.Status` to `woc.wf.Status`. + @awk '(/woc.wf.Spec/ || /woc.execWf.Status/) && !/not-woc-misuse/ {print FILENAME ":" FNR "\t" $0 ; exit 1}' $(shell find workflow/controller -type f -name '*.go' -not -name '*test*') # Tidy Go modules go mod tidy # Lint Go files $(GOPATH)/bin/golangci-lint run --fix --verbose + # Lint the UI + if [ -e ui/node_modules ]; then yarn --cwd ui lint ; fi # for local we have a faster target that prints to stdout, does not use json, and can cache because it has no coverage .PHONY: test -test: server/static/files.go dist/argosay +test: server/static/files.go go build ./... env KUBECONFIG=/dev/null $(GOTEST) ./... 
+ # marker file, based on it's modification time, we know how long ago this target was run + @mkdir -p dist + touch dist/test .PHONY: install install: githooks kubectl get ns $(KUBE_NAMESPACE) || kubectl create ns $(KUBE_NAMESPACE) kubectl config set-context --current --namespace=$(KUBE_NAMESPACE) - @echo "installing PROFILE=$(PROFILE), E2E_EXECUTOR=$(E2E_EXECUTOR)" + @echo "installing PROFILE=$(PROFILE)" kubectl kustomize --load-restrictor=LoadRestrictionsNone test/e2e/manifests/$(PROFILE) | sed 's|quay.io/argoproj/|$(IMAGE_NAMESPACE)/|' | sed 's/namespace: argo/namespace: $(KUBE_NAMESPACE)/' | kubectl -n $(KUBE_NAMESPACE) apply --prune -l app.kubernetes.io/part-of=argo -f - -ifneq ($(E2E_EXECUTOR),emissary) - # only change the executor from the default it we need to - kubectl patch cm/workflow-controller-configmap -p "{\"data\": {\"containerRuntimeExecutor\": \"$(E2E_EXECUTOR)\"}}" - kubectl apply -f manifests/quick-start/base/executor/$(E2E_EXECUTOR) -endif ifeq ($(PROFILE),stress) kubectl -n $(KUBE_NAMESPACE) apply -f test/stress/massive-workflow.yaml endif @@ -413,6 +431,9 @@ ifeq ($(RUN_MODE),kubernetes) kubectl -n $(KUBE_NAMESPACE) scale deploy/workflow-controller --replicas 1 kubectl -n $(KUBE_NAMESPACE) scale deploy/argo-server --replicas 1 endif +ifeq ($(AZURE),true) + kubectl -n $(KUBE_NAMESPACE) apply -f test/e2e/azure/deploy-azurite.yaml +endif .PHONY: argosay argosay: @@ -428,17 +449,8 @@ dist/argosay: mkdir -p dist cp test/e2e/images/argosay/v2/argosay dist/ -.PHONY: pull-images -pull-images: - docker pull golang:1.17 - docker pull debian:10.7-slim - docker pull mysql:8 - docker pull argoproj/argosay:v1 - docker pull argoproj/argosay:v2 - docker pull python:alpine3.6 - $(GOPATH)/bin/goreman: - go install github.com/mattn/goreman@v0.3.7 + go install github.com/mattn/goreman@v0.3.11 .PHONY: start ifeq ($(RUN_MODE),local) @@ -450,7 +462,13 @@ endif else start: install endif - @echo "starting STATIC_FILES=$(STATIC_FILES) (DEV_BRANCH=$(DEV_BRANCH), 
GIT_BRANCH=$(GIT_BRANCH)), AUTH_MODE=$(AUTH_MODE), RUN_MODE=$(RUN_MODE), MANAGED_NAMESPACE=$(MANAGED_NAMESPACE)" + @echo "starting STATIC_FILES=$(STATIC_FILES) (DEV_BRANCH=$(DEV_BRANCH), GIT_BRANCH=$(GIT_BRANCH)), AUTH_MODE=$(AUTH_MODE), RUN_MODE=$(RUN_MODE), MANAGED_NAMESPACE=$(MANAGED_NAMESPACE), AZURE=$(AZURE)" +ifneq ($(CTRL),true) + @echo "⚠️️ not starting controller. If you want to test the controller, use 'make start CTRL=true' to start it" +endif +ifneq ($(LOGS),true) + @echo "⚠️️ not starting logs. If you want to tail logs, use 'make start LOGS=true' to start it" +endif ifneq ($(API),true) @echo "⚠️️ not starting API. If you want to test the API, use 'make start API=true' to start it" endif @@ -462,22 +480,25 @@ ifneq ($(PLUGINS),true) endif # Check dex, minio, postgres and mysql are in hosts file ifeq ($(AUTH_MODE),sso) - grep '127.0.0.1[[:blank:]]*dex' /etc/hosts + grep '127.0.0.1.*dex' /etc/hosts endif - grep '127.0.0.1[[:blank:]]*minio' /etc/hosts - grep '127.0.0.1[[:blank:]]*postgres' /etc/hosts - grep '127.0.0.1[[:blank:]]*mysql' /etc/hosts +ifeq ($(AZURE),true) + grep '127.0.0.1.*azurite' /etc/hosts +endif + grep '127.0.0.1.*minio' /etc/hosts + grep '127.0.0.1.*postgres' /etc/hosts + grep '127.0.0.1.*mysql' /etc/hosts ./hack/port-forward.sh ifeq ($(RUN_MODE),local) - env DEFAULT_REQUEUE_TIME=$(DEFAULT_REQUEUE_TIME) SECURE=$(SECURE) ALWAYS_OFFLOAD_NODE_STATUS=$(ALWAYS_OFFLOAD_NODE_STATUS) LOG_LEVEL=$(LOG_LEVEL) UPPERIO_DB_DEBUG=$(UPPERIO_DB_DEBUG) IMAGE_NAMESPACE=$(IMAGE_NAMESPACE) VERSION=$(VERSION) AUTH_MODE=$(AUTH_MODE) NAMESPACED=$(NAMESPACED) NAMESPACE=$(KUBE_NAMESPACE) MANAGED_NAMESPACE=$(MANAGED_NAMESPACE) UI=$(UI) API=$(API) PLUGINS=$(PLUGINS) $(GOPATH)/bin/goreman -set-ports=false -logtime=false start $(shell if [ -z $GREP_LOGS ]; then echo; else echo "| grep \"$(GREP_LOGS)\""; fi) + env DEFAULT_REQUEUE_TIME=$(DEFAULT_REQUEUE_TIME) SECURE=$(SECURE) ALWAYS_OFFLOAD_NODE_STATUS=$(ALWAYS_OFFLOAD_NODE_STATUS) LOG_LEVEL=$(LOG_LEVEL) 
UPPERIO_DB_DEBUG=$(UPPERIO_DB_DEBUG) IMAGE_NAMESPACE=$(IMAGE_NAMESPACE) VERSION=$(VERSION) AUTH_MODE=$(AUTH_MODE) NAMESPACED=$(NAMESPACED) NAMESPACE=$(KUBE_NAMESPACE) MANAGED_NAMESPACE=$(MANAGED_NAMESPACE) CTRL=$(CTRL) LOGS=$(LOGS) UI=$(UI) API=$(API) PLUGINS=$(PLUGINS) $(GOPATH)/bin/goreman -set-ports=false -logtime=false start $(shell if [ -z $GREP_LOGS ]; then echo; else echo "| grep \"$(GREP_LOGS)\""; fi) endif $(GOPATH)/bin/stern: - ./hack/recurl.sh $(GOPATH)/bin/stern https://github.com/wercker/stern/releases/download/1.11.0/stern_`uname -s|tr '[:upper:]' '[:lower:]'`_amd64 + go install github.com/stern/stern@latest .PHONY: logs logs: $(GOPATH)/bin/stern - stern -l workflows.argoproj.io/workflow 2>&1 + $(GOPATH)/bin/stern -l workflows.argoproj.io/workflow 2>&1 .PHONY: wait wait: @@ -499,12 +520,20 @@ mysql-cli: test-cli: ./dist/argo test-%: - go test -v -timeout 15m -count 1 --tags $* -parallel 10 ./test/e2e + go test -failfast -v -timeout $(E2E_SUITE_TIMEOUT) -count 1 --tags $* -parallel $(E2E_PARALLEL) ./test/e2e .PHONY: test-examples test-examples: ./hack/test-examples.sh +.PHONY: test-%-sdk +test-%-sdk: + make --directory sdks/$* install test -B + +Test%: + go test -failfast -v -timeout $(E2E_SUITE_TIMEOUT) -count 1 --tags api,cli,cron,executor,examples,corefunctional,functional,plugins -parallel $(E2E_PARALLEL) ./test/e2e -run='.*/$*' + + # clean .PHONY: clean @@ -583,24 +612,74 @@ docs/fields.md: api/openapi-spec/swagger.json $(shell find examples -type f) hac docs/cli/argo.md: $(CLI_PKGS) go.sum server/static/files.go hack/cli/main.go go run ./hack/cli -# pre-push +# docs + +/usr/local/bin/mdspell: + npm i -g markdown-spellcheck + +.PHONY: docs-spellcheck +docs-spellcheck: /usr/local/bin/mdspell + # check docs for spelling mistakes + mdspell --ignore-numbers --ignore-acronyms --en-us --no-suggestions --report $(shell find docs -name '*.md' -not -name upgrading.md -not -name fields.md -not -name upgrading.md -not -name executor_swagger.md -not 
-path '*/cli/*') + +/usr/local/bin/markdown-link-check: + npm i -g markdown-link-check + +.PHONY: docs-linkcheck +docs-linkcheck: /usr/local/bin/markdown-link-check + # check docs for broken links + markdown-link-check -q -c .mlc_config.json $(shell find docs -name '*.md' -not -name fields.md -not -name executor_swagger.md) + +/usr/local/bin/markdownlint: + npm i -g markdownlint-cli -.git/hooks/commit-msg: hack/git/hooks/commit-msg - cp -v hack/git/hooks/commit-msg .git/hooks/commit-msg +.PHONY: docs-lint +docs-lint: /usr/local/bin/markdownlint + # lint docs + markdownlint docs --fix --ignore docs/fields.md --ignore docs/executor_swagger.md --ignore docs/cli --ignore docs/walk-through/the-structure-of-workflow-specs.md + +/usr/local/bin/mkdocs: + python -m pip install mkdocs==1.2.4 mkdocs_material==8.1.9 mkdocs-spellcheck==0.2.1 + +.PHONY: docs +docs: /usr/local/bin/mkdocs \ + docs-spellcheck \ + docs-lint \ + docs-linkcheck + # check environment-variables.md contains all variables mentioned in the code + ./hack/check-env-doc.sh + # check all docs are listed in mkdocs.yml + ./hack/check-mkdocs.sh + # build the docs + mkdocs build + # fix the fields.md document + go run -tags fields ./hack parseexamples + # tell the user the fastest way to edit docs + @echo "ℹ️ If you want to preview you docs, open site/index.html. 
If you want to edit them with hot-reload, run 'make docs-serve' to start mkdocs on port 8000" + +.PHONY: docs-serve +docs-serve: docs + mkdocs serve + +# pre-commit checks + +.git/hooks/%: hack/git/hooks/% + @mkdir -p .git/hooks + cp hack/git/hooks/$* .git/hooks/$* .PHONY: githooks -githooks: .git/hooks/commit-msg +githooks: .git/hooks/pre-commit .git/hooks/commit-msg .PHONY: pre-commit -pre-commit: githooks codegen lint +pre-commit: codegen lint docs + # marker file, based on it's modification time, we know how long ago this target was run + touch dist/pre-commit + +# release release-notes: /dev/null version=$(VERSION) envsubst < hack/release-notes.md > release-notes -.PHONY: parse-examples -parse-examples: - go run -tags fields ./hack parseexamples - .PHONY: checksums checksums: for f in ./dist/argo-*.gz; do openssl dgst -sha256 "$$f" | awk ' { print $$2 }' > "$$f".sha256 ; done diff --git a/OWNERS b/OWNERS index 43af4181e280..1b1c0889d153 100644 --- a/OWNERS +++ b/OWNERS @@ -1,7 +1,9 @@ owners: - alexec +- sarabala1979 reviewers: +- tczhao - xianlubird approvers: @@ -10,6 +12,5 @@ approvers: - dtaniwaki - edlee2121 - jessesuen -- sarabala1979 - simster7 - terrytangyuan diff --git a/Procfile b/Procfile index 7f136708af75..bdf3714af7bd 100644 --- a/Procfile +++ b/Procfile @@ -1,4 +1,4 @@ -controller: ./hack/free-port.sh 9090 && ARGO_EXECUTOR_PLUGINS=${PLUGINS} ARGO_REMOVE_PVC_PROTECTION_FINALIZER=true ARGO_PROGRESS_PATCH_TICK_DURATION=7s DEFAULT_REQUEUE_TIME=${DEFAULT_REQUEUE_TIME} LEADER_ELECTION_IDENTITY=local ALWAYS_OFFLOAD_NODE_STATUS=${ALWAYS_OFFLOAD_NODE_STATUS} OFFLOAD_NODE_STATUS_TTL=30s WORKFLOW_GC_PERIOD=30s UPPERIO_DB_DEBUG=${UPPERIO_DB_DEBUG} ARCHIVED_WORKFLOW_GC_PERIOD=30s ./dist/workflow-controller --executor-image ${IMAGE_NAMESPACE}/argoexec:${VERSION} --namespaced=${NAMESPACED} --namespace ${NAMESPACE} --managed-namespace=${MANAGED_NAMESPACE} --loglevel ${LOG_LEVEL} -argo-server: ./hack/free-port.sh 2746 && [ "$API" = "true" ] && 
UPPERIO_DB_DEBUG=${UPPERIO_DB_DEBUG} ./dist/argo --loglevel ${LOG_LEVEL} server --namespaced=${NAMESPACED} --namespace ${NAMESPACE} --auth-mode ${AUTH_MODE} --secure=$SECURE --x-frame-options=SAMEORIGIN -ui: ./hack/free-port.sh 8080 && [ "$UI" = "true" ] && yarn --cwd ui install && yarn --cwd ui start -logs: make logs \ No newline at end of file +controller: [ "$CTRL" = "true" ] && ./hack/free-port.sh 9090 && ARGO_EXECUTOR_PLUGINS=${PLUGINS} ARGO_REMOVE_PVC_PROTECTION_FINALIZER=true ARGO_PROGRESS_PATCH_TICK_DURATION=7s DEFAULT_REQUEUE_TIME=${DEFAULT_REQUEUE_TIME} LEADER_ELECTION_IDENTITY=local ALWAYS_OFFLOAD_NODE_STATUS=${ALWAYS_OFFLOAD_NODE_STATUS} OFFLOAD_NODE_STATUS_TTL=30s WORKFLOW_GC_PERIOD=30s UPPERIO_DB_DEBUG=${UPPERIO_DB_DEBUG} ARCHIVED_WORKFLOW_GC_PERIOD=30s ./dist/workflow-controller --executor-image ${IMAGE_NAMESPACE}/argoexec:${VERSION} --namespaced=${NAMESPACED} --namespace ${NAMESPACE} --managed-namespace=${MANAGED_NAMESPACE} --loglevel ${LOG_LEVEL} +argo-server: [ "$API" = "true" ] &&./hack/free-port.sh 2746 && UPPERIO_DB_DEBUG=${UPPERIO_DB_DEBUG} ./dist/argo --loglevel ${LOG_LEVEL} server --namespaced=${NAMESPACED} --namespace ${NAMESPACE} --auth-mode ${AUTH_MODE} --secure=$SECURE --x-frame-options=SAMEORIGIN +ui: [ "$UI" = "true" ] && ./hack/free-port.sh 8080 && yarn --cwd ui install && yarn --cwd ui start +logs: [ "$LOGS" = "true" ] && make logs \ No newline at end of file diff --git a/README.md b/README.md index 58fb44bd29e3..3034c42f9c04 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,4 @@ -[![slack](https://img.shields.io/badge/slack-argoproj-brightgreen.svg?logo=slack)](https://argoproj.github.io/community/join-slack) -[![CI](https://github.com/argoproj/argo-workflows/workflows/CI/badge.svg)](https://github.com/argoproj/argo-workflows/actions?query=event%3Apush+branch%3Amaster) -[![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/3830/badge)](https://bestpractices.coreinfrastructure.org/projects/3830) 
-[![Twitter Follow](https://img.shields.io/twitter/follow/argoproj?style=social)](https://twitter.com/argoproj) +[![slack](https://img.shields.io/badge/slack-argoproj-brightgreen.svg?logo=slack)](https://argoproj.github.io/community/join-slack) [![CI](https://github.com/argoproj/argo-workflows/workflows/CI/badge.svg)](https://github.com/argoproj/argo-workflows/actions?query=event%3Apush+branch%3Amaster) [![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/3830/badge)](https://bestpractices.coreinfrastructure.org/projects/3830) [![Twitter Follow](https://img.shields.io/twitter/follow/argoproj?style=social)](https://twitter.com/argoproj) ## What is Argo Workflows? @@ -43,30 +40,25 @@ Argo is a [Cloud Native Computing Foundation (CNCF)](https://cncf.io/) hosted pr ![Screenshot](docs/assets/screenshot.png) +## Documentation + +[View the docs](https://argoproj.github.io/argo-workflows/) + ## Ecosystem -![Ecosystem](docs/assets/ecosystem.png) - -[Argo Events](https://github.com/argoproj/argo-events) -| -[Argo Workflows Catalog](https://argoproj-labs.github.io/argo-workflows-catalog/) -| -[Couler](https://github.com/couler-proj/couler) -| -[Katib](https://github.com/kubeflow/katib) -| -[Kedro](https://kedro.readthedocs.io/en/stable/) -| -[Kubeflow Pipelines](https://github.com/kubeflow/pipelines) -| -[Onepanel](https://www.onepanel.ai/) -| -[Ploomber](https://github.com/ploomber/ploomber) -| -[Seldon](https://github.com/SeldonIO/seldon-core) -| -[SQLFlow](https://github.com/sql-machine-learning/sqlflow) +Just some of the projects that use or rely on Argo Workflows: +* [Argo Events](https://github.com/argoproj/argo-events) +* [Couler](https://github.com/couler-proj/couler) +* [Katib](https://github.com/kubeflow/katib) +* [Kedro](https://kedro.readthedocs.io/en/stable/) +* [Kubeflow Pipelines](https://github.com/kubeflow/pipelines) +* [Netflix 
Metaflow](https://metaflow.org) +* [Onepanel](https://www.onepanel.ai/) +* [Ploomber](https://github.com/ploomber/ploomber) +* [Seldon](https://github.com/SeldonIO/seldon-core) +* [SQLFlow](https://github.com/sql-machine-learning/sqlflow) +* [Orchest](https://github.com/orchest/orchest/) ## Client Libraries @@ -74,11 +66,15 @@ Check out our [Java, Golang and Python clients](docs/client-libraries.md). ## Quickstart +The following commands install Argo Workflows as well as some commmonly used components: + ```bash -kubectl create namespace argo -kubectl apply -n argo -f https://raw.githubusercontent.com/argoproj/argo-workflows/master/manifests/install.yaml +kubectl create ns argo +kubectl apply -n argo -f https://raw.githubusercontent.com/argoproj/argo-workflows/master/manifests/quick-start-postgres.yaml ``` +> **These manifests are intended to help you get started quickly. They contain hard-coded passwords that are publicly available and are not suitable in production.** + ## Who uses Argo Workflows? [Official Argo Workflows user list](USERS.md) @@ -92,7 +88,7 @@ kubectl apply -n argo -f https://raw.githubusercontent.com/argoproj/argo-workflo ## Features * UI to visualize and manage Workflows -* Artifact support (S3, Artifactory, Alibaba Cloud OSS, HTTP, Git, GCS, raw) +* Artifact support (S3, Artifactory, Alibaba Cloud OSS, Azure Blob Storage, HTTP, Git, GCS, raw) * Workflow templating to store commonly used Workflows in the cluster * Archiving Workflows after executing for later access * Scheduled workflows using cron diff --git a/USERS.md b/USERS.md index d15e43876f57..3a053067d915 100644 --- a/USERS.md +++ b/USERS.md @@ -1,6 +1,6 @@ ## Argo Workflows User Community Surveys & Feedback -Please find [here](https://github.com/argoproj/argoproj/blob/master/community/user_surveys/ArgoWorkflows2020SurveySummary.pdf) Argo Workflows user community 2020 survey results. 
+Please find [here](https://github.com/argoproj/argoproj/blob/master/community/user_surveys/ArgoWorkflows2020SurveySummary.pdf) Argo Workflows user community 2020 survey results and the 2021 results [here](https://blog.argoproj.io/argo-workflows-2021-survey-results-d6fa890030ee?gi=857daaa1faa9). ## Who uses Argo Workflows? @@ -24,9 +24,12 @@ Currently, the following organizations are **officially** using Argo Workflows: 1. [Ant Group](https://www.antgroup.com/) 1. [AppDirect](https://www.appdirect.com/) 1. [Arabesque](https://www.arabesque.com/) +1. [Argonaut](https://www.argonaut.dev/) +1. [ArthurAI](https://arthur.ai/) 1. [Astraea](https://astraea.earth/) 1. [BasisAI](https://basis-ai.com/) 1. [BEI.RE](https://www.bei.re/) +1. [bimspot](https://bimspot.io) 1. [BioBox Analytics](https://biobox.io) 1. [BlackRock](https://www.blackrock.com/) 1. [Bloomberg](https://www.bloomberg.com/) @@ -43,13 +46,15 @@ Currently, the following organizations are **officially** using Argo Workflows: 1. [Cisco](https://www.cisco.com/) 1. [CloudSeeds](https://www.cloudseeds.de/) 1. [Codec](https://www.codec.ai/) +1. [Codefresh](https://www.codefresh.io/) 1. [Commodus Tech](https://www.commodus.tech) 1. [Concierge Render](https://www.conciergerender.com) 1. [Cookpad](https://cookpad.com/) 1. [CoreFiling](https://www.corefiling.com/) -1. [Cratejoy](https://www.cratejoy.com/) 1. [CoreWeave Cloud](https://www.coreweave.com) +1. [Cratejoy](https://www.cratejoy.com/) 1. [Cruise](https://getcruise.com/) +1. [CVision AI](https://www.cvisionai.com) 1. [CyberAgent](https://www.cyberagent.co.jp/en/) 1. [Cyrus Biotechnology](https://cyrusbio.com/) 1. [Data4Risk](https://www.data4risk.com/) @@ -57,12 +62,14 @@ Currently, the following organizations are **officially** using Argo Workflows: 1. [DataRobot](https://www.datarobot.com/) 1. [DataStax](https://www.datastax.com/) 1. [DDEV](https://www.ddev.com/) -1. [Devtron Labs](https://github.com/devtron-labs/devtron) 1. 
[DevSamurai](https://www.devsamurai.com/) -1. [Dyno Therapeutics](https://dynotx.com) +1. [Devtron Labs](https://github.com/devtron-labs/devtron) 1. [DLR](https://www.dlr.de/eoc/) +1. [Dyno Therapeutics](https://dynotx.com) 1. [EBSCO Information Services](https://www.ebsco.com/) +1. [Enso Finance](https://enso.finance/) 1. [Equinor](https://www.equinor.com/) +1. [Elastic](https://www.elastic.co/) 1. [Fairwinds](https://fairwinds.com/) 1. [FOLIO](http://corp.folio-sec.com/) 1. [FreeWheel](https://freewheel.com/) @@ -77,8 +84,10 @@ Currently, the following organizations are **officially** using Argo Workflows: 1. [Google](https://www.google.com/intl/en/about/our-company/) 1. [Graviti](https://www.graviti.com) 1. [Greenhouse](https://greenhouse.io) +1. [H2O.ai](https://h2o.ai/) 1. [Habx](https://www.habx.com/) 1. [Helio](https://helio.exchange) +1. [Hemisphere Digital](https://hemisphere.digital) 1. [HOVER](https://hover.to) 1. [HSBC](https://hsbc.com) 1. [IBM](https://ibm.com) @@ -115,19 +124,24 @@ Currently, the following organizations are **officially** using Argo Workflows: 1. [One Concern](https://oneconcern.com/) 1. [Onepanel](https://docs.onepanel.ai) 1. [Oracle](https://www.oracle.com/) +1. [Orchest](https://www.orchest.io/) 1. [OVH](https://www.ovh.com/) -1. [Peak AI](https://www.peak.ai/) +1. [PathAI](https://www.pathai.com) 1. [PDOK](https://www.pdok.nl/) +1. [Peak AI](https://www.peak.ai/) 1. [Pipekit](https://pipeit.io) +1. [Pismo](https://pismo.io/) 1. [Polarpoint.io](https://polarpoint.io) 1. [Pollination](https://pollination.cloud) 1. [Preferred Networks](https://www.preferred-networks.jp/en/) 1. [Promaton](https://www.promaton.com/) 1. [Prudential](https://www.prudential.com.sg/) 1. [Quantibio](http://quantibio.com/us/en/) +1. [QuantumBlack](https://quantumblack.com/) 1. [Raccoon Digital Marketing](https://raccoon.ag/) 1. [Ramboll Shair](https://ramboll-shair.com/) 1. [Ravelin](https://www.ravelin.com/) +1. [Reco](https://reco.ai) 1. 
[Red Hat](https://www.redhat.com/en) 1. [Reserved AI](https://reserved.ai/) 1. [Riskified](https://www.riskified.com) @@ -152,21 +166,21 @@ Currently, the following organizations are **officially** using Argo Workflows: 1. [Threekit](https://www.threekit.com/) 1. [Tiger Analytics](https://www.tigeranalytics.com/) 1. [Tradeshift](https://tradeshift.com/) +1. [Trendyol](https://trendyol.com) 1. [Tulip](https://tulip.com/) 1. [Ubie](https://ubie.life/) 1. [UFirstGroup](https://www.ufirstgroup.com) 1. [Vispera](https://www.vispera.co) 1. [VMware](https://www.vmware.com/) +1. [Voyager](https://investvoyager.com/) 1. [Wavefront](https://www.wavefront.com/) 1. [Wellcome Trust](https://wellcome.ac.uk/) 1. [WooliesX](https://wooliesx.com.au/) 1. [Woolworths Group](https://www.woolworthsgroup.com.au/) 1. [Workiva](https://www.workiva.com/) -1. [Voyager](https://investvoyager.com/) +1. [Xueqiu](https://www.xueqiu.com/) 1. [Yubo](https://www.yubo.live/) 1. [Zhihu](https://www.zhihu.com/) -1. [Xueqiu](https://www.xueqiu.com/) -1. [bimspot](https://bimspot.io) ### Projects Using Argo @@ -175,5 +189,8 @@ In addition, the following projects are **officially** using Argo Workflows: 1. [Couler](https://github.com/couler-proj/couler) 1. [Hera Workflows](https://github.com/argoproj-labs/hera-workflows) 1. [Kubeflow](https://www.kubeflow.org/) +1. [Metaflow](https://www.metaflow.org) 1. [Onepanel](https://github.com/onepanelio/onepanel) 1. [SQLFlow](https://github.com/sql-machine-learning/sqlflow) +1. [BisQue](https://github.com/UCSB-VRL/bisqueUCSB) +1. 
[Tator](https://www.tator.io) diff --git a/api/jsonschema/schema.json b/api/jsonschema/schema.json index 6b23d91bafd0..1c1955b433b3 100644 --- a/api/jsonschema/schema.json +++ b/api/jsonschema/schema.json @@ -70,1167 +70,6 @@ }, "type": "object" }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AWSCredentials": { - "properties": { - "accessKeyId": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - }, - "secretAccessKey": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - }, - "sessionToken": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AWSEndpoint": { - "properties": { - "url": { - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractStep": { - "properties": { - "resources": { - "$ref": "#/definitions/io.k8s.api.core.v1.ResourceRequirements", - "title": "+kubebuilder:default={limits: {\"cpu\": \"500m\", \"memory\": \"256Mi\"}, requests: {\"cpu\": \"100m\", \"memory\": \"64Mi\"}}" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractVolumeSource": { - "properties": { - "awsElasticBlockStore": { - "$ref": "#/definitions/io.k8s.api.core.v1.AWSElasticBlockStoreVolumeSource", - "title": "AWSElasticBlockStore represents an AWS Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore\n+optional" - }, - "azureDisk": { - "$ref": "#/definitions/io.k8s.api.core.v1.AzureDiskVolumeSource", - "title": "AzureDisk represents an Azure Data Disk mount on the host and bind mount to the pod.\n+optional" - }, - "azureFile": { - "$ref": "#/definitions/io.k8s.api.core.v1.AzureFileVolumeSource", - "title": "AzureFile represents an Azure File Service mount on the host and bind mount to the pod.\n+optional" - }, - "cephfs": { - 
"$ref": "#/definitions/io.k8s.api.core.v1.CephFSVolumeSource", - "title": "CephFS represents a Ceph FS mount on the host that shares a pod's lifetime\n+optional" - }, - "cinder": { - "$ref": "#/definitions/io.k8s.api.core.v1.CinderVolumeSource", - "title": "Cinder represents a cinder volume attached and mounted on kubelets host machine.\nMore info: https://examples.io.k8s.mysql-cinder-pd/README.md\n+optional" - }, - "configMap": { - "$ref": "#/definitions/io.k8s.api.core.v1.ConfigMapVolumeSource", - "title": "ConfigMap represents a configMap that should populate this volume\n+optional" - }, - "csi": { - "$ref": "#/definitions/io.k8s.api.core.v1.CSIVolumeSource", - "title": "CSI (Container Storage Interface) represents ephemeral storage that is handled by certain external CSI drivers (Beta feature).\n+optional" - }, - "downwardAPI": { - "$ref": "#/definitions/io.k8s.api.core.v1.DownwardAPIVolumeSource", - "title": "DownwardAPI represents downward API about the pod that should populate this volume\n+optional" - }, - "emptyDir": { - "$ref": "#/definitions/io.k8s.api.core.v1.EmptyDirVolumeSource", - "title": "EmptyDir represents a temporary directory that shares a pod's lifetime.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir\n+optional" - }, - "ephemeral": { - "$ref": "#/definitions/io.k8s.api.core.v1.EphemeralVolumeSource", - "description": "Ephemeral represents a volume that is handled by a cluster storage driver.\nThe volume's lifecycle is tied to the pod that defines it - it will be created before the pod starts,\nand deleted when the pod is removed.\n\nUse this if:\na) the volume is only needed while the pod runs,\nb) features of normal volumes like restoring from snapshot or capacity\n tracking are needed,\nc) the storage driver is specified through a storage class, and\nd) the storage driver supports dynamic volume provisioning through\n a PersistentVolumeClaim (see EphemeralVolumeSource for more\n information on the connection between 
this volume type\n and PersistentVolumeClaim).\n\nUse PersistentVolumeClaim or one of the vendor-specific\nAPIs for volumes that persist for longer than the lifecycle\nof an individual pod.\n\nUse CSI for light-weight local ephemeral volumes if the CSI driver is meant to\nbe used that way - see the documentation of the driver for\nmore information.\n\nA pod can use both types of ephemeral volumes and\npersistent volumes at the same time.\n\n+optional" - }, - "fc": { - "$ref": "#/definitions/io.k8s.api.core.v1.FCVolumeSource", - "title": "FC represents a Fibre Channel resource that is attached to a kubelet's host machine and then exposed to the pod.\n+optional" - }, - "flexVolume": { - "$ref": "#/definitions/io.k8s.api.core.v1.FlexVolumeSource", - "title": "FlexVolume represents a generic volume resource that is\nprovisioned/attached using an exec based plugin.\n+optional" - }, - "flocker": { - "$ref": "#/definitions/io.k8s.api.core.v1.FlockerVolumeSource", - "title": "Flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running\n+optional" - }, - "gcePersistentDisk": { - "$ref": "#/definitions/io.k8s.api.core.v1.GCEPersistentDiskVolumeSource", - "title": "GCEPersistentDisk represents a GCE Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk\n+optional" - }, - "gitRepo": { - "$ref": "#/definitions/io.k8s.api.core.v1.GitRepoVolumeSource", - "title": "GitRepo represents a git repository at a particular revision.\nDEPRECATED: GitRepo is deprecated. 
To provision a container with a git repo, mount an\nEmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir\ninto the Pod's container.\n+optional" - }, - "glusterfs": { - "$ref": "#/definitions/io.k8s.api.core.v1.GlusterfsVolumeSource", - "title": "Glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime.\nMore info: https://examples.io.k8s.volumes/glusterfs/README.md\n+optional" - }, - "hostPath": { - "$ref": "#/definitions/io.k8s.api.core.v1.HostPathVolumeSource", - "title": "HostPath represents a pre-existing file or directory on the host\nmachine that is directly exposed to the container. This is generally\nused for system agents or other privileged things that are allowed\nto see the host machine. Most containers will NOT need this.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath\n---\nTODO(jonesdl) We need to restrict who can use host directory mounts and who can/can not\nmount host directories as read/write.\n+optional" - }, - "iscsi": { - "$ref": "#/definitions/io.k8s.api.core.v1.ISCSIVolumeSource", - "title": "ISCSI represents an ISCSI Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nMore info: https://examples.io.k8s.volumes/iscsi/README.md\n+optional" - }, - "nfs": { - "$ref": "#/definitions/io.k8s.api.core.v1.NFSVolumeSource", - "title": "NFS represents an NFS mount on the host that shares a pod's lifetime\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#nfs\n+optional" - }, - "persistentVolumeClaim": { - "$ref": "#/definitions/io.k8s.api.core.v1.PersistentVolumeClaimVolumeSource", - "title": "PersistentVolumeClaimVolumeSource represents a reference to a\nPersistentVolumeClaim in the same namespace.\nMore info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims\n+optional" - }, - "photonPersistentDisk": { - "$ref": "#/definitions/io.k8s.api.core.v1.PhotonPersistentDiskVolumeSource", - 
"title": "PhotonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine" - }, - "portworxVolume": { - "$ref": "#/definitions/io.k8s.api.core.v1.PortworxVolumeSource", - "title": "PortworxVolume represents a portworx volume attached and mounted on kubelets host machine\n+optional" - }, - "projected": { - "$ref": "#/definitions/io.k8s.api.core.v1.ProjectedVolumeSource", - "title": "Items for all in one resources secrets, configmaps, and downward API" - }, - "quobyte": { - "$ref": "#/definitions/io.k8s.api.core.v1.QuobyteVolumeSource", - "title": "Quobyte represents a Quobyte mount on the host that shares a pod's lifetime\n+optional" - }, - "rbd": { - "$ref": "#/definitions/io.k8s.api.core.v1.RBDVolumeSource", - "title": "RBD represents a Rados Block Device mount on the host that shares a pod's lifetime.\nMore info: https://examples.io.k8s.volumes/rbd/README.md\n+optional" - }, - "scaleIO": { - "$ref": "#/definitions/io.k8s.api.core.v1.ScaleIOVolumeSource", - "title": "ScaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes.\n+optional" - }, - "secret": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretVolumeSource", - "title": "Secret represents a secret that should populate this volume.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#secret\n+optional" - }, - "storageos": { - "$ref": "#/definitions/io.k8s.api.core.v1.StorageOSVolumeSource", - "title": "StorageOS represents a StorageOS volume attached and mounted on Kubernetes nodes.\n+optional" - }, - "vsphereVolume": { - "$ref": "#/definitions/io.k8s.api.core.v1.VsphereVirtualDiskVolumeSource", - "title": "VsphereVolume represents a vSphere volume attached and mounted on kubelets host machine\n+optional" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Backoff": { - "properties": { - "FactorPercentage": { - "title": "+kubebuilder:default=200", - "type": "integer" - }, - "cap": 
{ - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration", - "title": "+kubebuilder:default=\"0ms\"" - }, - "duration": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration", - "title": "+kubebuilder:default=\"100ms\"" - }, - "jitterPercentage": { - "title": "the amount of jitter per step, typically 10-20%, \u003e100% is valid, but strange\n+kubebuilder:default=10", - "type": "integer" - }, - "steps": { - "format": "uint64", - "title": "the number of backoff steps, zero means no retries\n+kubebuilder:default=20", - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Cat": { - "properties": { - "abstractStep": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractStep" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Code": { - "properties": { - "image": { - "description": "Image is used in preference to Runtime.", - "type": "string" - }, - "runtime": { - "type": "string" - }, - "source": { - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Container": { - "properties": { - "args": { - "items": { - "type": "string" - }, - "type": "array" - }, - "command": { - "items": { - "type": "string" - }, - "type": "array" - }, - "env": { - "items": { - "$ref": "#/definitions/io.k8s.api.core.v1.EnvVar" - }, - "type": "array" - }, - "image": { - "type": "string" - }, - "in": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Interface" - }, - "resources": { - "$ref": "#/definitions/io.k8s.api.core.v1.ResourceRequirements" - }, - "volumeMounts": { - "items": { - "$ref": "#/definitions/io.k8s.api.core.v1.VolumeMount" - }, - "type": "array" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Cron": { - "properties": { - "layout": { - "title": "+kubebuilder:default=\"2006-01-02T15:04:05Z07:00\"", - "type": 
"string" - }, - "schedule": { - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.DBDataSource": { - "properties": { - "value": { - "type": "string" - }, - "valueFrom": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.DBDataSourceFrom" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.DBDataSourceFrom": { - "properties": { - "secretKeyRef": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.DBSink": { - "properties": { - "actions": { - "items": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.SQLAction" - }, - "type": "array" - }, - "database": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Database" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.DBSource": { - "properties": { - "commitInterval": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration", - "title": "+kubebuilder:default=\"5s\"" - }, - "database": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Database" - }, - "initSchema": { - "title": "+kubebuilder:default=true", - "type": "boolean" - }, - "offsetColumn": { - "type": "string" - }, - "pollInterval": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration", - "title": "+kubebuilder:default=\"1s\"" - }, - "query": { - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Database": { - "properties": { - "dataSource": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.DBDataSource" - }, - "driver": { - "title": "+kubebuilder:default=default", - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Dedupe": { - "properties": { - 
"abstractStep": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractStep" - }, - "maxSize": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.api.resource.Quantity", - "title": "MaxSize is the maximum number of entries to keep in the in-memory database used to store recent UIDs.\nLarger number mean bigger windows of time for dedupe, but greater memory usage.\n+kubebuilder:default=\"1M\"" - }, - "uid": { - "title": "+kubebuilder:default=\"sha1(msg)\"", - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Expand": { - "properties": { - "abstractStep": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractStep" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Filter": { - "properties": { - "abstractStep": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractStep" - }, - "expression": { - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Flatten": { - "properties": { - "abstractStep": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractStep" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Git": { - "properties": { - "branch": { - "title": "+kubebuilder:default=main", - "type": "string" - }, - "command": { - "items": { - "type": "string" - }, - "type": "array" - }, - "env": { - "items": { - "$ref": "#/definitions/io.k8s.api.core.v1.EnvVar" - }, - "type": "array" - }, - "image": { - "type": "string" - }, - "insecureIgnoreHostKey": { - "title": "InsecureIgnoreHostKey is the bool value for ignoring check for host key", - "type": "boolean" - }, - "passwordSecret": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", - "title": "PasswordSecret is the secret selector to the repository password" - }, - "path": { - "description": 
"+kubebuilder:default=.", - "type": "string" - }, - "sshPrivateKeySecret": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", - "title": "SSHPrivateKeySecret is the secret selector to the repository ssh private key" - }, - "url": { - "type": "string" - }, - "usernameSecret": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", - "title": "UsernameSecret is the secret selector to the repository username" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Group": { - "properties": { - "endOfGroup": { - "type": "string" - }, - "format": { - "type": "string" - }, - "key": { - "type": "string" - }, - "storage": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Storage" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTP": { - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTPHeader": { - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - }, - "valueFrom": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTPHeaderSource" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTPHeaderSource": { - "properties": { - "secretKeyRef": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTPSink": { - "properties": { - "headers": { - "items": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTPHeader" - }, - "type": "array" - }, - "insecureSkipVerify": { - "type": "boolean" - }, - "url": { - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTPSource": { - "properties": { - "serviceName": { - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Interface": { - "properties": { - 
"fifo": { - "type": "boolean" - }, - "http": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTP" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.JetStream": { - "properties": { - "auth": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.NATSAuth" - }, - "name": { - "title": "+kubebuilder:default=default", - "type": "string" - }, - "natsUrl": { - "type": "string" - }, - "subject": { - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.JetStreamSink": { - "properties": { - "jetstream": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.JetStream" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.JetStreamSource": { - "properties": { - "jetstream": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.JetStream" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Kafka": { - "properties": { - "kafkaConfig": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.KafkaConfig" - }, - "name": { - "title": "+kubebuilder:default=default", - "type": "string" - }, - "topic": { - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.KafkaConfig": { - "properties": { - "brokers": { - "items": { - "type": "string" - }, - "type": "array" - }, - "maxMessageBytes": { - "type": "integer" - }, - "net": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.KafkaNET" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.KafkaNET": { - "properties": { - "sasl": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.SASL" - }, - "tls": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.TLS" - } - }, - "type": "object" - }, - 
"github.com.argoproj_labs.argo_dataflow.api.v1alpha1.KafkaSink": { - "properties": { - "acks": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.util.intstr.IntOrString", - "title": "+kubebuilder:default=\"all\"" - }, - "async": { - "type": "boolean" - }, - "batchSize": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.api.resource.Quantity", - "title": "+kubebuilder:default=\"100Ki\"" - }, - "compressionType": { - "title": "+kubebuilder:default=\"lz4\"", - "type": "string" - }, - "enableIdempotence": { - "title": "+kubebuilder:default=true", - "type": "boolean" - }, - "kafka": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Kafka" - }, - "linger": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration" - }, - "maxInflight": { - "title": "The maximum number of messages to be in-flight when async.\n+kubebuilder:default=20", - "type": "integer" - }, - "messageTimeout": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration", - "title": "+kubebuilder:default=\"30s\"" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.KafkaSource": { - "properties": { - "fetchMin": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.api.resource.Quantity", - "title": "+kubebuilder:default=\"100Ki\"" - }, - "fetchWaitMax": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration", - "title": "+kubebuilder:default=\"500ms\"" - }, - "groupId": { - "description": "GroupID is the consumer group ID. 
If not specified, a unique deterministic group ID is generated.", - "type": "string" - }, - "kafka": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Kafka" - }, - "startOffset": { - "title": "+kubebuilder:default=Last", - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Log": { - "properties": { - "truncate": { - "format": "uint64", - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Map": { - "properties": { - "abstractStep": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractStep" - }, - "expression": { - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Metadata": { - "properties": { - "annotations": { - "additionalProperties": { - "type": "string" - }, - "type": "object" - }, - "labels": { - "additionalProperties": { - "type": "string" - }, - "type": "object" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.NATSAuth": { - "properties": { - "token": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Pipeline": { - "properties": { - "metadata": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta" - }, - "spec": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.PipelineSpec" - }, - "status": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.PipelineStatus" - } - }, - "title": "+kubebuilder:object:root=true\n+kubebuilder:resource:shortName=pl\n+kubebuilder:subresource:status\n+kubebuilder:printcolumn:name=\"Phase\",type=string,JSONPath=`.status.phase`\n+kubebuilder:printcolumn:name=\"Message\",type=string,JSONPath=`.status.message`", - "type": "object" - }, - 
"github.com.argoproj_labs.argo_dataflow.api.v1alpha1.PipelineList": { - "properties": { - "items": { - "items": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Pipeline" - }, - "type": "array" - }, - "metadata": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.ListMeta" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.PipelineSpec": { - "properties": { - "deletionDelay": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration", - "title": "+kubebuilder:default=\"72h\"" - }, - "steps": { - "items": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.StepSpec" - }, - "title": "+patchStrategy=merge\n+patchMergeKey=name", - "type": "array" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.PipelineStatus": { - "properties": { - "conditions": { - "items": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Condition" - }, - "type": "array" - }, - "lastUpdated": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Time" - }, - "message": { - "type": "string" - }, - "phase": { - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.S3": { - "properties": { - "bucket": { - "type": "string" - }, - "credentials": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AWSCredentials" - }, - "endpoint": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AWSEndpoint" - }, - "name": { - "title": "+kubebuilder:default=default", - "type": "string" - }, - "region": { - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.S3Sink": { - "properties": { - "s3": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.S3" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.S3Source": { - 
"properties": { - "concurrency": { - "title": "+kubebuilder:default=1", - "type": "integer" - }, - "pollPeriod": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration", - "title": "+kubebuilder:default=\"1m\"" - }, - "s3": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.S3" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.SASL": { - "properties": { - "mechanism": { - "title": "SASLMechanism is the name of the enabled SASL mechanism.\nPossible values: OAUTHBEARER, PLAIN (defaults to PLAIN).\n+optional", - "type": "string" - }, - "password": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", - "title": "Password for SASL/PLAIN authentication" - }, - "user": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", - "title": "User is the authentication identity (authcid) to present for\nSASL/PLAIN or SASL/SCRAM authentication" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.SQLAction": { - "properties": { - "onError": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.SQLStatement" - }, - "onRecordNotFound": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.SQLStatement" - }, - "statement": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.SQLStatement" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.SQLStatement": { - "properties": { - "args": { - "items": { - "type": "string" - }, - "type": "array" - }, - "sql": { - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.STAN": { - "properties": { - "auth": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.NATSAuth" - }, - "clusterId": { - "type": "string" - }, - "maxInflight": { - "title": "Max inflight messages when subscribing to the stan server, which means how 
many messages\nbetween commits, therefore potential duplicates during disruption\n+kubebuilder:default=20", - "type": "integer" - }, - "name": { - "title": "+kubebuilder:default=default", - "type": "string" - }, - "natsMonitoringUrl": { - "type": "string" - }, - "natsUrl": { - "type": "string" - }, - "subject": { - "type": "string" - }, - "subjectPrefix": { - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Scale": { - "properties": { - "desiredReplicas": { - "description": "An expression to determine the number of replicas. Must evaluation to an `int`.", - "type": "string" - }, - "peekDelay": { - "title": "An expression to determine the delay for peeking. Maybe string or duration, e.g. `\"4m\"`\n+kubebuilder:default=\"defaultPeekDelay\"", - "type": "string" - }, - "scalingDelay": { - "title": "An expression to determine the delay for scaling. Maybe string or duration, e.g. `\"1m\"`\n+kubebuilder:default=\"defaultScalingDelay\"", - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Sidecar": { - "properties": { - "resources": { - "$ref": "#/definitions/io.k8s.api.core.v1.ResourceRequirements", - "title": "+kubebuilder:default={limits: {\"cpu\": \"500m\", \"memory\": \"256Mi\"}, requests: {\"cpu\": \"100m\", \"memory\": \"64Mi\"}}" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Sink": { - "properties": { - "db": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.DBSink" - }, - "deadLetterQueue": { - "type": "boolean" - }, - "http": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTPSink" - }, - "jetstream": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.JetStreamSink" - }, - "kafka": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.KafkaSink" - }, - "log": { - "$ref": 
"#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Log" - }, - "name": { - "title": "+kubebuilder:default=default", - "type": "string" - }, - "s3": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.S3Sink" - }, - "stan": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.STAN" - }, - "volume": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.VolumeSink" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Source": { - "properties": { - "cron": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Cron" - }, - "db": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.DBSource" - }, - "http": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTPSource" - }, - "jetstream": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.JetStreamSource" - }, - "kafka": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.KafkaSource" - }, - "name": { - "title": "+kubebuilder:default=default", - "type": "string" - }, - "retry": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Backoff", - "title": "+kubebuilder:default={duration: \"100ms\", steps: 20, factorPercentage: 200, jitterPercentage: 10}" - }, - "s3": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.S3Source" - }, - "stan": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.STAN" - }, - "volume": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.VolumeSource" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Step": { - "properties": { - "metadata": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta" - }, - "spec": { - "$ref": 
"#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.StepSpec" - }, - "status": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.StepStatus" - } - }, - "title": "+kubebuilder:object:root=true\n+kubebuilder:subresource:status\n+kubebuilder:subresource:scale:specpath=.spec.replicas,statuspath=.status.replicas,selectorpath=.status.selector\n+kubebuilder:printcolumn:name=\"Phase\",type=string,JSONPath=`.status.phase`\n+kubebuilder:printcolumn:name=\"Reason\",type=string,JSONPath=`.status.reason`\n+kubebuilder:printcolumn:name=\"Message\",type=string,JSONPath=`.status.message`\n+kubebuilder:printcolumn:name=\"Desired\",type=string,JSONPath=`.spec.replicas`\n+kubebuilder:printcolumn:name=\"Current\",type=string,JSONPath=`.status.replicas`", - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.StepSpec": { - "properties": { - "affinity": { - "$ref": "#/definitions/io.k8s.api.core.v1.Affinity" - }, - "cat": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Cat" - }, - "code": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Code" - }, - "container": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Container" - }, - "dedupe": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Dedupe" - }, - "expand": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Expand" - }, - "filter": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Filter" - }, - "flatten": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Flatten" - }, - "git": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Git" - }, - "group": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Group" - }, - "imagePullSecrets": { - "items": { - "$ref": 
"#/definitions/io.k8s.api.core.v1.LocalObjectReference" - }, - "title": "ImagePullSecrets is a list of references to secrets in the same namespace to use for pulling any images\nin pods that reference this ServiceAccount. ImagePullSecrets are distinct from Secrets because Secrets\ncan be mounted in the pod, but ImagePullSecrets are only accessed by the kubelet.\nMore info: https://kubernetes.io/docs/concepts/containers/images/#specifying-imagepullsecrets-on-a-pod\n+patchStrategy=merge\n+patchMergeKey=name", - "type": "array" - }, - "map": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Map" - }, - "metadata": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Metadata" - }, - "name": { - "title": "+kubebuilder:default=default", - "type": "string" - }, - "nodeSelector": { - "additionalProperties": { - "type": "string" - }, - "type": "object" - }, - "replicas": { - "title": "+kubebuilder:default=1", - "type": "integer" - }, - "restartPolicy": { - "title": "+kubebuilder:default=OnFailure", - "type": "string" - }, - "scale": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Scale", - "title": "+kubebuilder:default={peekDelay: \"defaultPeekDelay\", scalingDelay: \"defaultScalingDelay\", desiredReplicas: \"\"}" - }, - "serviceAccountName": { - "title": "+kubebuilder:default=pipeline", - "type": "string" - }, - "sidecar": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Sidecar", - "title": "+kubebuilder:default={resources: {limits: {\"cpu\": \"500m\", \"memory\": \"256Mi\"}, requests: {\"cpu\": \"100m\", \"memory\": \"64Mi\"}}}" - }, - "sinks": { - "items": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Sink" - }, - "title": "+patchStrategy=merge\n+patchMergeKey=name", - "type": "array" - }, - "sources": { - "items": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Source" - }, - "title": 
"+patchStrategy=merge\n+patchMergeKey=name", - "type": "array" - }, - "terminator": { - "type": "boolean" - }, - "tolerations": { - "items": { - "$ref": "#/definitions/io.k8s.api.core.v1.Toleration" - }, - "type": "array" - }, - "volumes": { - "items": { - "$ref": "#/definitions/io.k8s.api.core.v1.Volume" - }, - "title": "+patchStrategy=merge\n+patchMergeKey=name", - "type": "array" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.StepStatus": { - "properties": { - "lastScaledAt": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Time" - }, - "message": { - "type": "string" - }, - "phase": { - "type": "string" - }, - "reason": { - "type": "string" - }, - "replicas": { - "type": "integer" - }, - "selector": { - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Storage": { - "properties": { - "name": { - "type": "string" - }, - "subPath": { - "title": "volume name", - "type": "string" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.TLS": { - "properties": { - "caCertSecret": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", - "title": "CACertSecret refers to the secret that contains the CA cert" - }, - "certSecret": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", - "title": "CertSecret refers to the secret that contains the cert" - }, - "keySecret": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", - "title": "KeySecret refers to the secret that contains the key" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.VolumeSink": { - "properties": { - "abstractVolumeSource": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractVolumeSource" - } - }, - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.VolumeSource": { - "properties": { - "abstractVolumeSource": { - "$ref": 
"#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractVolumeSource" - }, - "concurrency": { - "title": "+kubebuilder:default=1", - "type": "integer" - }, - "pollPeriod": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration", - "title": "+kubebuilder:default=\"1m\"" - }, - "readOnly": { - "type": "boolean" - } - }, - "type": "object" - }, "google.protobuf.Any": { "properties": { "type_url": { @@ -4485,6 +3324,30 @@ }, "type": "object" }, + "io.argoproj.workflow.v1alpha1.ArtGCStatus": { + "description": "ArtGCStatus maintains state related to ArtifactGC", + "properties": { + "notSpecified": { + "description": "if this is true, we already checked to see if we need to do it and we don't", + "type": "boolean" + }, + "podsRecouped": { + "additionalProperties": { + "type": "boolean" + }, + "description": "have completed Pods been processed? (mapped by Pod name) used to prevent re-processing the Status of a Pod more than once", + "type": "object" + }, + "strategiesProcessed": { + "additionalProperties": { + "type": "boolean" + }, + "description": "have Pods been started to perform this strategy? 
(enables us not to re-process what we've already done)", + "type": "object" + } + }, + "type": "object" + }, "io.argoproj.workflow.v1alpha1.Artifact": { "description": "Artifact indicates an artifact to place at a specified path", "properties": { @@ -4496,10 +3359,22 @@ "description": "ArchiveLogs indicates if the container logs should be archived", "type": "boolean" }, + "artifactGC": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactGC", + "description": "ArtifactGC describes the strategy to use when to deleting an artifact from completed or deleted workflows" + }, "artifactory": { "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactoryArtifact", "description": "Artifactory contains artifactory artifact location details" }, + "azure": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.AzureArtifact", + "description": "Azure contains Azure Storage artifact location details" + }, + "deleted": { + "description": "Has this been deleted?", + "type": "boolean" + }, "from": { "description": "From allows an artifact to reference an artifact from a previous step", "type": "string" @@ -4570,6 +3445,50 @@ ], "type": "object" }, + "io.argoproj.workflow.v1alpha1.ArtifactGC": { + "description": "ArtifactGC describes how to delete artifacts from completed Workflows", + "properties": { + "podMetadata": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Metadata", + "description": "PodMetadata is an optional field for specifying the Labels and Annotations that should be assigned to the Pod doing the deletion" + }, + "serviceAccountName": { + "description": "ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion", + "type": "string" + }, + "strategy": { + "description": "Strategy is the strategy to use.", + "type": "string" + } + }, + "type": "object" + }, + "io.argoproj.workflow.v1alpha1.ArtifactGCSpec": { + "description": "ArtifactGCSpec specifies the Artifacts that need 
to be deleted", + "properties": { + "artifactsByNode": { + "additionalProperties": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactNodeSpec" + }, + "description": "ArtifactsByNode maps Node name to information pertaining to Artifacts on that Node", + "type": "object" + } + }, + "type": "object" + }, + "io.argoproj.workflow.v1alpha1.ArtifactGCStatus": { + "description": "ArtifactGCStatus describes the result of the deletion", + "properties": { + "artifactResultsByNode": { + "additionalProperties": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactResultNodeStatus" + }, + "description": "ArtifactResultsByNode maps Node name to result", + "type": "object" + } + }, + "type": "object" + }, "io.argoproj.workflow.v1alpha1.ArtifactLocation": { "description": "ArtifactLocation describes a location for a single or multiple artifacts. It is used as single artifact in the context of inputs/outputs (e.g. outputs.artifacts.artname). It is also used to describe the location of multiple artifacts such as the archive location of a single workflow step, which the executor will use as a default location to store its files.", "properties": { @@ -4581,6 +3500,10 @@ "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactoryArtifact", "description": "Artifactory contains artifactory artifact location details" }, + "azure": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.AzureArtifact", + "description": "Azure contains Azure Storage artifact location details" + }, "gcs": { "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.GCSArtifact", "description": "GCS contains GCS artifact location details" @@ -4612,6 +3535,23 @@ }, "type": "object" }, + "io.argoproj.workflow.v1alpha1.ArtifactNodeSpec": { + "description": "ArtifactNodeSpec specifies the Artifacts that need to be deleted for a given Node", + "properties": { + "archiveLocation": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactLocation", + "description": "ArchiveLocation 
is the template-level Artifact location specification" + }, + "artifacts": { + "additionalProperties": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Artifact" + }, + "description": "Artifacts maps artifact name to Artifact description", + "type": "object" + } + }, + "type": "object" + }, "io.argoproj.workflow.v1alpha1.ArtifactPaths": { "description": "ArtifactPaths expands a step from a collection of artifacts", "properties": { @@ -4623,10 +3563,22 @@ "description": "ArchiveLogs indicates if the container logs should be archived", "type": "boolean" }, + "artifactGC": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactGC", + "description": "ArtifactGC describes the strategy to use when to deleting an artifact from completed or deleted workflows" + }, "artifactory": { "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactoryArtifact", "description": "Artifactory contains artifactory artifact location details" }, + "azure": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.AzureArtifact", + "description": "Azure contains Azure Storage artifact location details" + }, + "deleted": { + "description": "Has this been deleted?", + "type": "boolean" + }, "from": { "description": "From allows an artifact to reference an artifact from a previous step", "type": "string" @@ -4708,6 +3660,10 @@ "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactoryArtifactRepository", "description": "Artifactory stores artifacts to JFrog Artifactory" }, + "azure": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.AzureArtifactRepository", + "description": "Azure stores artifact in an Azure Storage account" + }, "gcs": { "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.GCSArtifactRepository", "description": "GCS stores artifact in a GCS object store" @@ -4765,6 +3721,40 @@ }, "type": "object" }, + "io.argoproj.workflow.v1alpha1.ArtifactResult": { + "description": "ArtifactResult describes the result of attempting to delete a given 
Artifact", + "properties": { + "error": { + "description": "Error is an optional error message which should be set if Success==false", + "type": "string" + }, + "name": { + "description": "Name is the name of the Artifact", + "type": "string" + }, + "success": { + "description": "Success describes whether the deletion succeeded", + "type": "boolean" + } + }, + "required": [ + "name" + ], + "type": "object" + }, + "io.argoproj.workflow.v1alpha1.ArtifactResultNodeStatus": { + "description": "ArtifactResultNodeStatus describes the result of the deletion on a given node", + "properties": { + "artifactResults": { + "additionalProperties": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactResult" + }, + "description": "ArtifactResults maps Artifact name to result of the deletion", + "type": "object" + } + }, + "type": "object" + }, "io.argoproj.workflow.v1alpha1.ArtifactoryArtifact": { "description": "ArtifactoryArtifact is the location of an artifactory artifact", "properties": { @@ -4804,6 +3794,67 @@ }, "type": "object" }, + "io.argoproj.workflow.v1alpha1.AzureArtifact": { + "description": "AzureArtifact is the location of an Azure Storage artifact", + "properties": { + "accountKeySecret": { + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", + "description": "AccountKeySecret is the secret selector to the Azure Blob Storage account access key" + }, + "blob": { + "description": "Blob is the blob name (i.e., path) in the container where the artifact resides", + "type": "string" + }, + "container": { + "description": "Container is the container where resources will be stored", + "type": "string" + }, + "endpoint": { + "description": "Endpoint is the service url associated with an account. 
It is most likely \"https://\u003cACCOUNT_NAME\u003e.blob.core.windows.net\"", + "type": "string" + }, + "useSDKCreds": { + "description": "UseSDKCreds tells the driver to figure out credentials based on sdk defaults.", + "type": "boolean" + } + }, + "required": [ + "endpoint", + "container", + "blob" + ], + "type": "object" + }, + "io.argoproj.workflow.v1alpha1.AzureArtifactRepository": { + "description": "AzureArtifactRepository defines the controller configuration for an Azure Blob Storage artifact repository", + "properties": { + "accountKeySecret": { + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", + "description": "AccountKeySecret is the secret selector to the Azure Blob Storage account access key" + }, + "blobNameFormat": { + "description": "BlobNameFormat defines the format of how to store blob names. Can reference workflow variables", + "type": "string" + }, + "container": { + "description": "Container is the container where resources will be stored", + "type": "string" + }, + "endpoint": { + "description": "Endpoint is the service url associated with an account. 
It is most likely \"https://\u003cACCOUNT_NAME\u003e.blob.core.windows.net\"", + "type": "string" + }, + "useSDKCreds": { + "description": "UseSDKCreds tells the driver to figure out credentials based on sdk defaults.", + "type": "boolean" + } + }, + "required": [ + "endpoint", + "container" + ], + "type": "object" + }, "io.argoproj.workflow.v1alpha1.Backoff": { "description": "Backoff is a backoff strategy to use within retryStrategy", "properties": { @@ -4822,6 +3873,20 @@ }, "type": "object" }, + "io.argoproj.workflow.v1alpha1.BasicAuth": { + "description": "BasicAuth describes the secret selectors required for basic authentication", + "properties": { + "passwordSecret": { + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", + "description": "PasswordSecret is the secret selector to the repository password" + }, + "usernameSecret": { + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", + "description": "UsernameSecret is the secret selector to the repository username" + } + }, + "type": "object" + }, "io.argoproj.workflow.v1alpha1.Cache": { "description": "Cache is the configuration for the type of cache to be used", "properties": { @@ -4835,6 +3900,18 @@ ], "type": "object" }, + "io.argoproj.workflow.v1alpha1.ClientCertAuth": { + "description": "ClientCertAuth holds necessary information for client authentication via certificates", + "properties": { + "clientCertSecret": { + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" + }, + "clientKeySecret": { + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" + } + }, + "type": "object" + }, "io.argoproj.workflow.v1alpha1.ClusterWorkflowTemplate": { "description": "ClusterWorkflowTemplate is the definition of a workflow template resource in cluster scope", "properties": { @@ -4932,6 +4009,17 @@ }, "type": "object" }, + "io.argoproj.workflow.v1alpha1.CollectEventRequest": { + "properties": { + "name": { + "type": "string" + } + }, + "type": "object" + }, + 
"io.argoproj.workflow.v1alpha1.CollectEventResponse": { + "type": "object" + }, "io.argoproj.workflow.v1alpha1.Condition": { "properties": { "message": { @@ -4952,14 +4040,14 @@ "io.argoproj.workflow.v1alpha1.ContainerNode": { "properties": { "args": { - "description": "Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + "description": "Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", "items": { "type": "string" }, "type": "array" }, "command": { - "description": "Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. 
Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + "description": "Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", "items": { "type": "string" }, @@ -4988,7 +4076,7 @@ "type": "array" }, "image": { - "description": "Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", + "description": "Container image name. 
More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", "type": "string" }, "imagePullPolicy": { @@ -5570,6 +4658,9 @@ "serviceAccountName": { "type": "string" }, + "serviceAccountNamespace": { + "type": "string" + }, "subject": { "type": "string" } @@ -5579,6 +4670,10 @@ "io.argoproj.workflow.v1alpha1.GitArtifact": { "description": "GitArtifact is the location of an git artifact", "properties": { + "branch": { + "description": "Branch is the branch to fetch when `SingleBranch` is enabled", + "type": "string" + }, "depth": { "description": "Depth specifies clones/fetches should be shallow and include the given number of commits from the branch tip", "type": "integer" @@ -5610,6 +4705,10 @@ "description": "Revision is the git commit, tag, branch to checkout", "type": "string" }, + "singleBranch": { + "description": "SingleBranch enables single branch clone, using the `branch` parameter", + "type": "boolean" + }, "sshPrivateKeySecret": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", "description": "SSHPrivateKeySecret is the secret selector to the repository ssh private key" @@ -5731,6 +4830,10 @@ "description": "Body is content of the HTTP Request", "type": "string" }, + "bodyFrom": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.HTTPBodySource", + "description": "BodyFrom is content of the HTTP Request as Bytes" + }, "headers": { "description": "Headers are an optional list of headers to send with HTTP requests", "items": { @@ -5739,7 +4842,7 @@ "type": "array" }, "insecureSkipVerify": { - "description": "insecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client", + "description": "InsecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client", "type": "boolean" }, "method": { @@ -5765,8 +4868,12 @@ 
"type": "object" }, "io.argoproj.workflow.v1alpha1.HTTPArtifact": { - "description": "HTTPArtifact allows an file served on HTTP to be placed as an input artifact in a container", + "description": "HTTPArtifact allows a file served on HTTP to be placed as an input artifact in a container", "properties": { + "auth": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.HTTPAuth", + "description": "Auth contains information for client authentication" + }, "headers": { "description": "Headers are an optional list of headers to send with HTTP requests for artifacts", "items": { @@ -5784,6 +4891,30 @@ ], "type": "object" }, + "io.argoproj.workflow.v1alpha1.HTTPAuth": { + "properties": { + "basicAuth": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.BasicAuth" + }, + "clientCert": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ClientCertAuth" + }, + "oauth2": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.OAuth2Auth" + } + }, + "type": "object" + }, + "io.argoproj.workflow.v1alpha1.HTTPBodySource": { + "description": "HTTPBodySource contains the source of the HTTP body.", + "properties": { + "bytes": { + "format": "byte", + "type": "string" + } + }, + "type": "object" + }, "io.argoproj.workflow.v1alpha1.HTTPHeader": { "properties": { "name": { @@ -5953,9 +5084,6 @@ "description": "TemplateRef is the reference to the template resource to execute by the hook" } }, - "required": [ - "template" - ], "type": "object" }, "io.argoproj.workflow.v1alpha1.Link": { @@ -6005,6 +5133,18 @@ }, "type": "object" }, + "io.argoproj.workflow.v1alpha1.ManifestFrom": { + "properties": { + "artifact": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Artifact", + "description": "Artifact contains the artifact to use" + } + }, + "required": [ + "artifact" + ], + "type": "object" + }, "io.argoproj.workflow.v1alpha1.MemoizationStatus": { "description": "MemoizationStatus is the status of this memoized node", "properties": { @@ -6295,6 +5435,50 @@ 
"description": "NoneStrategy indicates to skip tar process and upload the files or directory tree as independent files. Note that if the artifact is a directory, the artifact driver must support the ability to save/load the directory appropriately.", "type": "object" }, + "io.argoproj.workflow.v1alpha1.OAuth2Auth": { + "description": "OAuth2Auth holds all information for client authentication via OAuth2 tokens", + "properties": { + "clientIDSecret": { + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" + }, + "clientSecretSecret": { + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" + }, + "endpointParams": { + "items": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.OAuth2EndpointParam" + }, + "type": "array" + }, + "scopes": { + "items": { + "type": "string" + }, + "type": "array" + }, + "tokenURLSecret": { + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" + } + }, + "type": "object" + }, + "io.argoproj.workflow.v1alpha1.OAuth2EndpointParam": { + "description": "EndpointParam is for requesting optional fields that should be sent in the oauth request", + "properties": { + "key": { + "description": "Name is the header name", + "type": "string" + }, + "value": { + "description": "Value is the literal value to use for the header", + "type": "string" + } + }, + "required": [ + "key" + ], + "type": "object" + }, "io.argoproj.workflow.v1alpha1.OSSArtifact": { "description": "OSSArtifact is the location of an Alibaba Cloud OSS artifact", "properties": { @@ -6560,6 +5744,10 @@ "description": "Manifest contains the kubernetes manifest", "type": "string" }, + "manifestFrom": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ManifestFrom", + "description": "ManifestFrom is the source for a single kubernetes manifest" + }, "mergeStrategy": { "description": "MergeStrategy is the strategy used to merge a patch. 
It defaults to \"strategic\" Must be one of: strategic, merge, json", "type": "string" @@ -6589,6 +5777,12 @@ "namespace": { "type": "string" }, + "parameters": { + "items": { + "type": "string" + }, + "type": "array" + }, "uid": { "type": "string" } @@ -6615,6 +5809,12 @@ "nodeFieldSelector": { "type": "string" }, + "parameters": { + "items": { + "type": "string" + }, + "type": "array" + }, "restartSuccessful": { "type": "boolean" }, @@ -6645,7 +5845,7 @@ }, "limit": { "$ref": "#/definitions/io.k8s.apimachinery.pkg.util.intstr.IntOrString", - "description": "Limit is the maximum number of attempts when retrying a container" + "description": "Limit is the maximum number of retry attempts when retrying a container. It does not include the original container; the maximum number of total attempts will be `limit + 1`." }, "retryPolicy": { "description": "RetryPolicy is a policy of NodePhase statuses that will be retried", @@ -6782,14 +5982,14 @@ "description": "ScriptTemplate is a template subtype to enable scripting through code steps", "properties": { "args": { - "description": "Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + "description": "Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. 
If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", "items": { "type": "string" }, "type": "array" }, "command": { - "description": "Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + "description": "Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", "items": { "type": "string" }, @@ -6812,7 +6012,7 @@ "type": "array" }, "image": { - "description": "Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", + "description": "Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", "type": "string" }, "imagePullPolicy": { @@ -7371,14 +6571,14 @@ "description": "UserContainer is a container specified by a user.", "properties": { "args": { - "description": "Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + "description": "Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. 
\"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", "items": { "type": "string" }, "type": "array" }, "command": { - "description": "Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + "description": "Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", "items": { "type": "string" }, @@ -7401,7 +6601,7 @@ "type": "array" }, "image": { - "description": "Docker image name. 
More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", + "description": "Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", "type": "string" }, "imagePullPolicy": { @@ -7800,6 +7000,12 @@ }, "namespace": { "type": "string" + }, + "parameters": { + "items": { + "type": "string" + }, + "type": "array" } }, "type": "object" @@ -7829,6 +7035,12 @@ "nodeFieldSelector": { "type": "string" }, + "parameters": { + "items": { + "type": "string" + }, + "type": "array" + }, "restartSuccessful": { "type": "boolean" } @@ -7877,6 +7089,10 @@ "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Arguments", "description": "Arguments contain the parameters and artifacts sent to the workflow entrypoint Parameters are referencable globally using the 'workflow' variable prefix. e.g. {{io.argoproj.workflow.v1alpha1.parameters.myparam}}" }, + "artifactGC": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactGC", + "description": "ArtifactGC describes the strategy to use when deleting artifacts from completed or deleted workflows (applies to all output Artifacts unless Artifact.ArtifactGC is specified, which overrides this)" + }, "artifactRepositoryRef": { "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactRepositoryRef", "description": "ArtifactRepositoryRef specifies the configMap name and key containing the artifact repository config." 
@@ -7954,14 +7170,14 @@ }, "podGC": { "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.PodGC", - "description": "PodGC describes the strategy to use when to deleting completed pods" + "description": "PodGC describes the strategy to use when deleting completed pods" }, "podMetadata": { "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Metadata", "description": "PodMetadata defines additional metadata that should be applied to workflow pods" }, "podPriority": { - "description": "Priority to apply to workflow pods.", + "description": "Priority to apply to workflow pods. DEPRECATED: Use PodPriorityClassName instead.", "type": "integer" }, "podPriorityClassName": { @@ -8032,7 +7248,7 @@ }, "volumeClaimGC": { "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.VolumeClaimGC", - "description": "VolumeClaimGC describes the strategy to use when to deleting volumes from completed workflows" + "description": "VolumeClaimGC describes the strategy to use when deleting volumes from completed workflows" }, "volumeClaimTemplates": { "description": "VolumeClaimTemplates is a list of claims that containers are allowed to reference. 
The Workflow controller will create the claims at the beginning of the workflow and delete the claims upon completion of the workflow", @@ -8054,7 +7270,7 @@ }, "workflowMetadata": { "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowMetadata", - "description": "WorkflowMetadata contains some metadata of the workflow to be refer" + "description": "WorkflowMetadata contains some metadata of the workflow to refer to" }, "workflowTemplateRef": { "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplateRef", @@ -8066,6 +7282,10 @@ "io.argoproj.workflow.v1alpha1.WorkflowStatus": { "description": "WorkflowStatus contains overall status information about a workflow", "properties": { + "artifactGCStatus": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtGCStatus", + "description": "ArtifactGCStatus maintains the status of Artifact Garbage Collection" + }, "artifactRepositoryRef": { "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactRepositoryRefStatus", "description": "ArtifactRepositoryRef is used to cache the repository to use so we do not need to determine it everytime we reconcile." @@ -10997,43 +10217,6 @@ "description": "Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors.\n\nThe serialization format is:\n\n\u003cquantity\u003e ::= \u003csignedNumber\u003e\u003csuffix\u003e\n (Note that \u003csuffix\u003e may be empty, from the \"\" case in \u003cdecimalSI\u003e.)\n\u003cdigit\u003e ::= 0 | 1 | ... | 9 \u003cdigits\u003e ::= \u003cdigit\u003e | \u003cdigit\u003e\u003cdigits\u003e \u003cnumber\u003e ::= \u003cdigits\u003e | \u003cdigits\u003e.\u003cdigits\u003e | \u003cdigits\u003e. 
| .\u003cdigits\u003e \u003csign\u003e ::= \"+\" | \"-\" \u003csignedNumber\u003e ::= \u003cnumber\u003e | \u003csign\u003e\u003cnumber\u003e \u003csuffix\u003e ::= \u003cbinarySI\u003e | \u003cdecimalExponent\u003e | \u003cdecimalSI\u003e \u003cbinarySI\u003e ::= Ki | Mi | Gi | Ti | Pi | Ei\n (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html)\n\u003cdecimalSI\u003e ::= m | \"\" | k | M | G | T | P | E\n (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.)\n\u003cdecimalExponent\u003e ::= \"e\" \u003csignedNumber\u003e | \"E\" \u003csignedNumber\u003e\n\nNo matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities.\n\nWhen a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized.\n\nBefore serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that:\n a. No precision is lost\n b. No fractional digits will be emitted\n c. The exponent (or suffix) is as large as possible.\nThe sign will be omitted unless the number is negative.\n\nExamples:\n 1.5 will be serialized as \"1500m\"\n 1.5Gi will be serialized as \"1536Mi\"\n\nNote that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise.\n\nNon-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. 
(So always use canonical form, or don't diff.)\n\nThis format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation.", "type": "string" }, - "io.k8s.apimachinery.pkg.apis.meta.v1.Condition": { - "description": "Condition contains details for one aspect of the current state of this API Resource.", - "properties": { - "lastTransitionTime": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Time", - "description": "lastTransitionTime is the last time the condition transitioned from one status to another. This should be when the underlying condition changed. If that is not known, then using the time when the API field changed is acceptable." - }, - "message": { - "description": "message is a human readable message indicating details about the transition. This may be an empty string.", - "type": "string" - }, - "observedGeneration": { - "description": "observedGeneration represents the .metadata.generation that the condition was set based upon. For instance, if .metadata.generation is currently 12, but the .status.conditions[x].observedGeneration is 9, the condition is out of date with respect to the current state of the instance.", - "type": "integer" - }, - "reason": { - "description": "reason contains a programmatic identifier indicating the reason for the condition's last transition. Producers of specific condition types may define expected values and meanings for this field, and whether the values are considered a guaranteed API. The value should be a CamelCase string. 
This field may not be empty.", - "type": "string" - }, - "status": { - "description": "status of the condition, one of True, False, Unknown.", - "type": "string" - }, - "type": { - "description": "type of condition in CamelCase or in foo.example.com/CamelCase.", - "type": "string" - } - }, - "required": [ - "type", - "status", - "lastTransitionTime", - "reason", - "message" - ], - "type": "object" - }, "io.k8s.apimachinery.pkg.apis.meta.v1.CreateOptions": { "description": "CreateOptions may be provided when creating an API object.", "properties": { @@ -11049,16 +10232,7 @@ "type": "string" }, "fieldValidation": { - "title": "fieldValidation determines how the server should respond to\nunknown/duplicate fields in the object in the request.\nIntroduced as alpha in 1.23, older servers or servers with the\n`ServerSideFieldValidation` feature disabled will discard valid values\nspecified in this param and not perform any server side field validation.\nValid values are:\n- Ignore: ignores unknown/duplicate fields.\n- Warn: responds with a warning for each\nunknown/duplicate field, but successfully serves the request.\n- Strict: fails the request on unknown/duplicate fields.\n+optional", - "type": "string" - } - }, - "type": "object" - }, - "io.k8s.apimachinery.pkg.apis.meta.v1.Duration": { - "description": "Duration is a wrapper around time.Duration which supports correct\nmarshaling to YAML and JSON. In particular, it marshals into strings, which\ncan be used as map keys in json.", - "properties": { - "duration": { + "title": "fieldValidation instructs the server on how to handle\nobjects in the request (POST/PUT/PATCH) containing unknown\nor duplicate fields, provided that the `ServerSideFieldValidation`\nfeature gate is also enabled. Valid values are:\n- Ignore: This will ignore any unknown fields that are silently\ndropped from the object, and will ignore all but the last duplicate\nfield that the decoder encounters. 
This is the default behavior\nprior to v1.23 and is the default behavior when the\n`ServerSideFieldValidation` feature gate is disabled.\n- Warn: This will send a warning via the standard warning response\nheader for each unknown field that is dropped from the object, and\nfor each duplicate field that is encountered. The request will\nstill succeed if there are no other errors, and will only persist\nthe last of any duplicate fields. This is the default when the\n`ServerSideFieldValidation` feature gate is enabled.\n- Strict: This will fail the request with a BadRequest error if\nany unknown fields would be dropped from the object, or if any\nduplicate fields are present. The error returned from the server\nwill contain all unknown and duplicate fields encountered.\n+optional", "type": "string" } }, @@ -11344,55 +10518,6 @@ "io.k8s.apimachinery.pkg.util.intstr.IntOrString": { "type": "string" }, - "pipeline.DeletePipelineResponse": { - "type": "object" - }, - "pipeline.LogEntry": { - "properties": { - "msg": { - "type": "string" - }, - "namespace": { - "type": "string" - }, - "pipelineName": { - "type": "string" - }, - "stepName": { - "type": "string" - }, - "time": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Time" - } - }, - "title": "structured log entry", - "type": "object" - }, - "pipeline.PipelineWatchEvent": { - "properties": { - "object": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Pipeline" - }, - "type": { - "type": "string" - } - }, - "type": "object" - }, - "pipeline.RestartPipelineResponse": { - "type": "object" - }, - "pipeline.StepWatchEvent": { - "properties": { - "object": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Step" - }, - "type": { - "type": "string" - } - }, - "type": "object" - }, "sensor.CreateSensorRequest": { "properties": { "createOptions": { diff --git a/api/openapi-spec/swagger.json b/api/openapi-spec/swagger.json index 
497112b2980a..4c7c6cc229c9 100644 --- a/api/openapi-spec/swagger.json +++ b/api/openapi-spec/swagger.json @@ -1374,245 +1374,6 @@ } } }, - "/api/v1/pipelines/{namespace}": { - "get": { - "tags": [ - "PipelineService" - ], - "operationId": "PipelineService_ListPipelines", - "parameters": [ - { - "type": "string", - "name": "namespace", - "in": "path", - "required": true - }, - { - "type": "string", - "description": "A selector to restrict the list of returned objects by their labels.\nDefaults to everything.\n+optional.", - "name": "listOptions.labelSelector", - "in": "query" - }, - { - "type": "string", - "description": "A selector to restrict the list of returned objects by their fields.\nDefaults to everything.\n+optional.", - "name": "listOptions.fieldSelector", - "in": "query" - }, - { - "type": "boolean", - "description": "Watch for changes to the described resources and return them as a stream of\nadd, update, and remove notifications. Specify resourceVersion.\n+optional.", - "name": "listOptions.watch", - "in": "query" - }, - { - "type": "boolean", - "description": "allowWatchBookmarks requests watch events with type \"BOOKMARK\".\nServers that do not implement bookmarks may ignore this flag and\nbookmarks are sent at the server's discretion. 
Clients should not\nassume bookmarks are returned at any specific interval, nor may they\nassume the server will send any BOOKMARK event during a session.\nIf this is not a watch, this field is ignored.\n+optional.", - "name": "listOptions.allowWatchBookmarks", - "in": "query" - }, - { - "type": "string", - "description": "resourceVersion sets a constraint on what resource versions a request may be served from.\nSee https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for\ndetails.\n\nDefaults to unset\n+optional", - "name": "listOptions.resourceVersion", - "in": "query" - }, - { - "type": "string", - "description": "resourceVersionMatch determines how resourceVersion is applied to list calls.\nIt is highly recommended that resourceVersionMatch be set for list calls where\nresourceVersion is set\nSee https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for\ndetails.\n\nDefaults to unset\n+optional", - "name": "listOptions.resourceVersionMatch", - "in": "query" - }, - { - "type": "string", - "format": "int64", - "description": "Timeout for the list/watch call.\nThis limits the duration of the call, regardless of any activity or inactivity.\n+optional.", - "name": "listOptions.timeoutSeconds", - "in": "query" - }, - { - "type": "string", - "format": "int64", - "description": "limit is a maximum number of responses to return for a list call. If more items exist, the\nserver will set the `continue` field on the list metadata to a value that can be used with the\nsame initial query to retrieve the next set of results. Setting a limit may return fewer than\nthe requested amount of items (up to zero items) in the event all requested objects are\nfiltered out and clients should only use the presence of the continue field to determine whether\nmore results are available. Servers may choose not to support the limit argument and will return\nall of the available results. 
If limit is specified and the continue field is empty, clients may\nassume that no more results are available. This field is not supported if watch is true.\n\nThe server guarantees that the objects returned when using continue will be identical to issuing\na single list call without a limit - that is, no objects created, modified, or deleted after the\nfirst request is issued will be included in any subsequent continued requests. This is sometimes\nreferred to as a consistent snapshot, and ensures that a client that is using limit to receive\nsmaller chunks of a very large result can ensure they see all possible objects. If objects are\nupdated during a chunked list the version of the object that was present at the time the first list\nresult was calculated is returned.", - "name": "listOptions.limit", - "in": "query" - }, - { - "type": "string", - "description": "The continue option should be set when retrieving more results from the server. Since this value is\nserver defined, clients may only use the continue value from a previous query result with identical\nquery parameters (except for the value of continue) and the server may reject a continue value it\ndoes not recognize. If the specified continue value is no longer valid whether due to expiration\n(generally five to fifteen minutes) or a configuration change on the server, the server will\nrespond with a 410 ResourceExpired error together with a continue token. If the client needs a\nconsistent list, it must restart their list without the continue field. Otherwise, the client may\nsend another list request with the token received with the 410 error, the server will respond with\na list starting from the next key, but from the latest snapshot, which is inconsistent from the\nprevious list results - objects that are created, modified, or deleted after the first list request\nwill be included in the response, as long as their keys are after the \"next key\".\n\nThis field is not supported when watch is true. 
Clients may start a watch from the last\nresourceVersion value returned by the server and not miss any modifications.", - "name": "listOptions.continue", - "in": "query" - } - ], - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.PipelineList" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/grpc.gateway.runtime.Error" - } - } - } - } - }, - "/api/v1/pipelines/{namespace}/{name}": { - "get": { - "tags": [ - "PipelineService" - ], - "operationId": "PipelineService_GetPipeline", - "parameters": [ - { - "type": "string", - "name": "namespace", - "in": "path", - "required": true - }, - { - "type": "string", - "name": "name", - "in": "path", - "required": true - }, - { - "type": "string", - "description": "resourceVersion sets a constraint on what resource versions a request may be served from.\nSee https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for\ndetails.\n\nDefaults to unset\n+optional", - "name": "getOptions.resourceVersion", - "in": "query" - } - ], - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Pipeline" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/grpc.gateway.runtime.Error" - } - } - } - }, - "delete": { - "tags": [ - "PipelineService" - ], - "operationId": "PipelineService_DeletePipeline", - "parameters": [ - { - "type": "string", - "name": "namespace", - "in": "path", - "required": true - }, - { - "type": "string", - "name": "name", - "in": "path", - "required": true - }, - { - "type": "string", - "format": "int64", - "description": "The duration in seconds before the object should be deleted. Value must be non-negative integer.\nThe value zero indicates delete immediately. 
If this value is nil, the default grace period for the\nspecified type will be used.\nDefaults to a per object value if not specified. zero means delete immediately.\n+optional.", - "name": "deleteOptions.gracePeriodSeconds", - "in": "query" - }, - { - "type": "string", - "description": "Specifies the target UID.\n+optional.", - "name": "deleteOptions.preconditions.uid", - "in": "query" - }, - { - "type": "string", - "description": "Specifies the target ResourceVersion\n+optional.", - "name": "deleteOptions.preconditions.resourceVersion", - "in": "query" - }, - { - "type": "boolean", - "description": "Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7.\nShould the dependent objects be orphaned. If true/false, the \"orphan\"\nfinalizer will be added to/removed from the object's finalizers list.\nEither this field or PropagationPolicy may be set, but not both.\n+optional.", - "name": "deleteOptions.orphanDependents", - "in": "query" - }, - { - "type": "string", - "description": "Whether and how garbage collection will be performed.\nEither this field or OrphanDependents may be set, but not both.\nThe default policy is decided by the existing finalizer set in the\nmetadata.finalizers and the resource-specific default policy.\nAcceptable values are: 'Orphan' - orphan the dependents; 'Background' -\nallow the garbage collector to delete the dependents in the background;\n'Foreground' - a cascading policy that deletes all dependents in the\nforeground.\n+optional.", - "name": "deleteOptions.propagationPolicy", - "in": "query" - }, - { - "type": "array", - "items": { - "type": "string" - }, - "collectionFormat": "multi", - "description": "When present, indicates that modifications should not be\npersisted. An invalid or unrecognized dryRun directive will\nresult in an error response and no further processing of the\nrequest. 
Valid values are:\n- All: all dry run stages will be processed\n+optional.", - "name": "deleteOptions.dryRun", - "in": "query" - } - ], - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/pipeline.DeletePipelineResponse" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/grpc.gateway.runtime.Error" - } - } - } - } - }, - "/api/v1/pipelines/{namespace}/{name}/restart": { - "post": { - "tags": [ - "PipelineService" - ], - "operationId": "PipelineService_RestartPipeline", - "parameters": [ - { - "type": "string", - "name": "namespace", - "in": "path", - "required": true - }, - { - "type": "string", - "name": "name", - "in": "path", - "required": true - } - ], - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/pipeline.RestartPipelineResponse" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/grpc.gateway.runtime.Error" - } - } - } - } - }, "/api/v1/sensors/{namespace}": { "get": { "tags": [ @@ -2212,12 +1973,12 @@ } } }, - "/api/v1/stream/pipelines/{namespace}": { + "/api/v1/stream/sensors/{namespace}": { "get": { "tags": [ - "PipelineService" + "SensorService" ], - "operationId": "PipelineService_WatchPipelines", + "operationId": "SensorService_WatchSensors", "parameters": [ { "type": "string", @@ -2287,13 +2048,13 @@ "description": "A successful response.(streaming responses)", "schema": { "type": "object", - "title": "Stream result of pipeline.PipelineWatchEvent", + "title": "Stream result of sensor.SensorWatchEvent", "properties": { "error": { "$ref": "#/definitions/grpc.gateway.runtime.StreamError" }, "result": { - "$ref": "#/definitions/pipeline.PipelineWatchEvent" + "$ref": "#/definitions/sensor.SensorWatchEvent" } } } @@ -2307,12 +2068,12 @@ } } }, - "/api/v1/stream/pipelines/{namespace}/logs": { + 
"/api/v1/stream/sensors/{namespace}/logs": { "get": { "tags": [ - "PipelineService" + "SensorService" ], - "operationId": "PipelineService_PipelineLogs", + "operationId": "SensorService_SensorsLogs", "parameters": [ { "type": "string", @@ -2322,19 +2083,19 @@ }, { "type": "string", - "description": "optional - only return entries for this pipeline.", + "description": "optional - only return entries for this sensor name.", "name": "name", "in": "query" }, { "type": "string", - "description": "optional - only return entries for this step.", - "name": "stepName", + "description": "optional - only return entries for this trigger.", + "name": "triggerName", "in": "query" }, { "type": "string", - "description": "optional - only return entries which match this expresssion.", + "description": "option - only return entries where `msg` contains this regular expressions.", "name": "grep", "in": "query" }, @@ -2409,13 +2170,13 @@ "description": "A successful response.(streaming responses)", "schema": { "type": "object", - "title": "Stream result of pipeline.LogEntry", + "title": "Stream result of sensor.LogEntry", "properties": { "error": { "$ref": "#/definitions/grpc.gateway.runtime.StreamError" }, "result": { - "$ref": "#/definitions/pipeline.LogEntry" + "$ref": "#/definitions/sensor.LogEntry" } } } @@ -2429,27 +2190,103 @@ } } }, - "/api/v1/stream/sensors/{namespace}": { - "get": { + "/api/v1/tracking/event": { + "post": { "tags": [ - "SensorService" + "InfoService" ], - "operationId": "SensorService_WatchSensors", + "operationId": "InfoService_CollectEvent", "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.CollectEventRequest" + } + } + ], + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.CollectEventResponse" + } + }, + "default": { + "description": "An unexpected error response.", + 
"schema": { + "$ref": "#/definitions/grpc.gateway.runtime.Error" + } + } + } + } + }, + "/api/v1/userinfo": { + "get": { + "tags": [ + "InfoService" + ], + "operationId": "InfoService_GetUserInfo", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.GetUserInfoResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/grpc.gateway.runtime.Error" + } + } + } + } + }, + "/api/v1/version": { + "get": { + "tags": [ + "InfoService" + ], + "operationId": "InfoService_GetVersion", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Version" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/grpc.gateway.runtime.Error" + } + } + } + } + }, + "/api/v1/workflow-event-bindings/{namespace}": { + "get": { + "tags": [ + "EventService" + ], + "operationId": "EventService_ListWorkflowEventBindings", + "parameters": [ + { + "type": "string", + "name": "namespace", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "A selector to restrict the list of returned objects by their labels.\nDefaults to everything.\n+optional.", + "name": "listOptions.labelSelector", + "in": "query" + }, { "type": "string", - "name": "namespace", - "in": "path", - "required": true - }, - { - "type": "string", - "description": "A selector to restrict the list of returned objects by their labels.\nDefaults to everything.\n+optional.", - "name": "listOptions.labelSelector", - "in": "query" - }, - { - "type": "string", "description": "A selector to restrict the list of returned objects by their fields.\nDefaults to everything.\n+optional.", "name": "listOptions.fieldSelector", "in": "query" @@ -2501,18 +2338,9 @@ ], "responses": { "200": { - "description": "A successful 
response.(streaming responses)", + "description": "A successful response.", "schema": { - "type": "object", - "title": "Stream result of sensor.SensorWatchEvent", - "properties": { - "error": { - "$ref": "#/definitions/grpc.gateway.runtime.StreamError" - }, - "result": { - "$ref": "#/definitions/sensor.SensorWatchEvent" - } - } + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowEventBindingList" } }, "default": { @@ -2524,12 +2352,12 @@ } } }, - "/api/v1/stream/sensors/{namespace}/logs": { + "/api/v1/workflow-events/{namespace}": { "get": { "tags": [ - "SensorService" + "WorkflowService" ], - "operationId": "SensorService_SensorsLogs", + "operationId": "WorkflowService_WatchWorkflows", "parameters": [ { "type": "string", @@ -2539,85 +2367,63 @@ }, { "type": "string", - "description": "optional - only return entries for this sensor name.", - "name": "name", - "in": "query" - }, - { - "type": "string", - "description": "optional - only return entries for this trigger.", - "name": "triggerName", - "in": "query" - }, - { - "type": "string", - "description": "option - only return entries where `msg` contains this regular expressions.", - "name": "grep", + "description": "A selector to restrict the list of returned objects by their labels.\nDefaults to everything.\n+optional.", + "name": "listOptions.labelSelector", "in": "query" }, { "type": "string", - "description": "The container for which to stream logs. Defaults to only container if there is one container in the pod.\n+optional.", - "name": "podLogOptions.container", + "description": "A selector to restrict the list of returned objects by their fields.\nDefaults to everything.\n+optional.", + "name": "listOptions.fieldSelector", "in": "query" }, { "type": "boolean", - "description": "Follow the log stream of the pod. 
Defaults to false.\n+optional.", - "name": "podLogOptions.follow", + "description": "Watch for changes to the described resources and return them as a stream of\nadd, update, and remove notifications. Specify resourceVersion.\n+optional.", + "name": "listOptions.watch", "in": "query" }, { "type": "boolean", - "description": "Return previous terminated container logs. Defaults to false.\n+optional.", - "name": "podLogOptions.previous", + "description": "allowWatchBookmarks requests watch events with type \"BOOKMARK\".\nServers that do not implement bookmarks may ignore this flag and\nbookmarks are sent at the server's discretion. Clients should not\nassume bookmarks are returned at any specific interval, nor may they\nassume the server will send any BOOKMARK event during a session.\nIf this is not a watch, this field is ignored.\n+optional.", + "name": "listOptions.allowWatchBookmarks", "in": "query" }, { "type": "string", - "format": "int64", - "description": "A relative time in seconds before the current time from which to show logs. If this value\nprecedes the time a pod was started, only logs since the pod start will be returned.\nIf this value is in the future, no logs will be returned.\nOnly one of sinceSeconds or sinceTime may be specified.\n+optional.", - "name": "podLogOptions.sinceSeconds", + "description": "resourceVersion sets a constraint on what resource versions a request may be served from.\nSee https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for\ndetails.\n\nDefaults to unset\n+optional", + "name": "listOptions.resourceVersion", "in": "query" }, { "type": "string", - "format": "int64", - "description": "Represents seconds of UTC time since Unix epoch\n1970-01-01T00:00:00Z. 
Must be from 0001-01-01T00:00:00Z to\n9999-12-31T23:59:59Z inclusive.", - "name": "podLogOptions.sinceTime.seconds", - "in": "query" - }, - { - "type": "integer", - "format": "int32", - "description": "Non-negative fractions of a second at nanosecond resolution. Negative\nsecond values with fractions must still have non-negative nanos values\nthat count forward in time. Must be from 0 to 999,999,999\ninclusive. This field may be limited in precision depending on context.", - "name": "podLogOptions.sinceTime.nanos", + "description": "resourceVersionMatch determines how resourceVersion is applied to list calls.\nIt is highly recommended that resourceVersionMatch be set for list calls where\nresourceVersion is set\nSee https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for\ndetails.\n\nDefaults to unset\n+optional", + "name": "listOptions.resourceVersionMatch", "in": "query" }, { - "type": "boolean", - "description": "If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line\nof log output. Defaults to false.\n+optional.", - "name": "podLogOptions.timestamps", + "type": "string", + "format": "int64", + "description": "Timeout for the list/watch call.\nThis limits the duration of the call, regardless of any activity or inactivity.\n+optional.", + "name": "listOptions.timeoutSeconds", "in": "query" }, { "type": "string", "format": "int64", - "description": "If set, the number of lines from the end of the logs to show. If not specified,\nlogs are shown from the creation of the container or sinceSeconds or sinceTime\n+optional.", - "name": "podLogOptions.tailLines", + "description": "limit is a maximum number of responses to return for a list call. If more items exist, the\nserver will set the `continue` field on the list metadata to a value that can be used with the\nsame initial query to retrieve the next set of results. 
Setting a limit may return fewer than\nthe requested amount of items (up to zero items) in the event all requested objects are\nfiltered out and clients should only use the presence of the continue field to determine whether\nmore results are available. Servers may choose not to support the limit argument and will return\nall of the available results. If limit is specified and the continue field is empty, clients may\nassume that no more results are available. This field is not supported if watch is true.\n\nThe server guarantees that the objects returned when using continue will be identical to issuing\na single list call without a limit - that is, no objects created, modified, or deleted after the\nfirst request is issued will be included in any subsequent continued requests. This is sometimes\nreferred to as a consistent snapshot, and ensures that a client that is using limit to receive\nsmaller chunks of a very large result can ensure they see all possible objects. If objects are\nupdated during a chunked list the version of the object that was present at the time the first list\nresult was calculated is returned.", + "name": "listOptions.limit", "in": "query" }, { "type": "string", - "format": "int64", - "description": "If set, the number of bytes to read from the server before terminating the\nlog output. This may not display a complete final line of logging, and may return\nslightly more or slightly less than the specified limit.\n+optional.", - "name": "podLogOptions.limitBytes", + "description": "The continue option should be set when retrieving more results from the server. Since this value is\nserver defined, clients may only use the continue value from a previous query result with identical\nquery parameters (except for the value of continue) and the server may reject a continue value it\ndoes not recognize. 
If the specified continue value is no longer valid whether due to expiration\n(generally five to fifteen minutes) or a configuration change on the server, the server will\nrespond with a 410 ResourceExpired error together with a continue token. If the client needs a\nconsistent list, it must restart their list without the continue field. Otherwise, the client may\nsend another list request with the token received with the 410 error, the server will respond with\na list starting from the next key, but from the latest snapshot, which is inconsistent from the\nprevious list results - objects that are created, modified, or deleted after the first list request\nwill be included in the response, as long as their keys are after the \"next key\".\n\nThis field is not supported when watch is true. Clients may start a watch from the last\nresourceVersion value returned by the server and not miss any modifications.", + "name": "listOptions.continue", "in": "query" }, { - "type": "boolean", - "description": "insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the\nserving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver\nand the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real\nkubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the\nconnection to the real kubelet is vulnerable to a man in the middle attack (e.g. 
an attacker could not intercept\nthe actual log data coming from the real kubelet).\n+optional.", - "name": "podLogOptions.insecureSkipTLSVerifyBackend", + "type": "string", + "name": "fields", "in": "query" } ], @@ -2626,13 +2432,13 @@ "description": "A successful response.(streaming responses)", "schema": { "type": "object", - "title": "Stream result of sensor.LogEntry", + "title": "Stream result of io.argoproj.workflow.v1alpha1.WorkflowWatchEvent", "properties": { "error": { "$ref": "#/definitions/grpc.gateway.runtime.StreamError" }, "result": { - "$ref": "#/definitions/sensor.LogEntry" + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowWatchEvent" } } } @@ -2646,12 +2452,12 @@ } } }, - "/api/v1/stream/steps/{namespace}": { + "/api/v1/workflow-templates/{namespace}": { "get": { "tags": [ - "PipelineService" + "WorkflowTemplateService" ], - "operationId": "PipelineService_WatchSteps", + "operationId": "WorkflowTemplateService_ListWorkflowTemplates", "parameters": [ { "type": "string", @@ -2718,18 +2524,9 @@ ], "responses": { "200": { - "description": "A successful response.(streaming responses)", + "description": "A successful response.", "schema": { - "type": "object", - "title": "Stream result of pipeline.StepWatchEvent", - "properties": { - "error": { - "$ref": "#/definitions/grpc.gateway.runtime.StreamError" - }, - "result": { - "$ref": "#/definitions/pipeline.StepWatchEvent" - } - } + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplateList" } }, "default": { @@ -2739,19 +2536,33 @@ } } } - } - }, - "/api/v1/userinfo": { - "get": { + }, + "post": { "tags": [ - "InfoService" + "WorkflowTemplateService" + ], + "operationId": "WorkflowTemplateService_CreateWorkflowTemplate", + "parameters": [ + { + "type": "string", + "name": "namespace", + "in": "path", + "required": true + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplateCreateRequest" 
+ } + } ], - "operationId": "InfoService_GetUserInfo", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.GetUserInfoResponse" + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplate" } }, "default": { @@ -2763,17 +2574,33 @@ } } }, - "/api/v1/version": { - "get": { + "/api/v1/workflow-templates/{namespace}/lint": { + "post": { "tags": [ - "InfoService" + "WorkflowTemplateService" + ], + "operationId": "WorkflowTemplateService_LintWorkflowTemplate", + "parameters": [ + { + "type": "string", + "name": "namespace", + "in": "path", + "required": true + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplateLintRequest" + } + } ], - "operationId": "InfoService_GetVersion", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Version" + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplate" } }, "default": { @@ -2785,12 +2612,12 @@ } } }, - "/api/v1/workflow-event-bindings/{namespace}": { + "/api/v1/workflow-templates/{namespace}/{name}": { "get": { "tags": [ - "EventService" + "WorkflowTemplateService" ], - "operationId": "EventService_ListWorkflowEventBindings", + "operationId": "WorkflowTemplateService_GetWorkflowTemplate", "parameters": [ { "type": "string", @@ -2800,66 +2627,65 @@ }, { "type": "string", - "description": "A selector to restrict the list of returned objects by their labels.\nDefaults to everything.\n+optional.", - "name": "listOptions.labelSelector", - "in": "query" - }, - { - "type": "string", - "description": "A selector to restrict the list of returned objects by their fields.\nDefaults to everything.\n+optional.", - "name": "listOptions.fieldSelector", - "in": "query" - }, - { - "type": "boolean", - "description": "Watch for changes to the described resources and 
return them as a stream of\nadd, update, and remove notifications. Specify resourceVersion.\n+optional.", - "name": "listOptions.watch", - "in": "query" - }, - { - "type": "boolean", - "description": "allowWatchBookmarks requests watch events with type \"BOOKMARK\".\nServers that do not implement bookmarks may ignore this flag and\nbookmarks are sent at the server's discretion. Clients should not\nassume bookmarks are returned at any specific interval, nor may they\nassume the server will send any BOOKMARK event during a session.\nIf this is not a watch, this field is ignored.\n+optional.", - "name": "listOptions.allowWatchBookmarks", - "in": "query" + "name": "name", + "in": "path", + "required": true }, { "type": "string", "description": "resourceVersion sets a constraint on what resource versions a request may be served from.\nSee https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for\ndetails.\n\nDefaults to unset\n+optional", - "name": "listOptions.resourceVersion", - "in": "query" - }, - { - "type": "string", - "description": "resourceVersionMatch determines how resourceVersion is applied to list calls.\nIt is highly recommended that resourceVersionMatch be set for list calls where\nresourceVersion is set\nSee https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for\ndetails.\n\nDefaults to unset\n+optional", - "name": "listOptions.resourceVersionMatch", + "name": "getOptions.resourceVersion", "in": "query" + } + ], + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplate" + } }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/grpc.gateway.runtime.Error" + } + } + } + }, + "put": { + "tags": [ + "WorkflowTemplateService" + ], + "operationId": "WorkflowTemplateService_UpdateWorkflowTemplate", + "parameters": [ { "type": "string", - "format": "int64", - 
"description": "Timeout for the list/watch call.\nThis limits the duration of the call, regardless of any activity or inactivity.\n+optional.", - "name": "listOptions.timeoutSeconds", - "in": "query" + "name": "namespace", + "in": "path", + "required": true }, { "type": "string", - "format": "int64", - "description": "limit is a maximum number of responses to return for a list call. If more items exist, the\nserver will set the `continue` field on the list metadata to a value that can be used with the\nsame initial query to retrieve the next set of results. Setting a limit may return fewer than\nthe requested amount of items (up to zero items) in the event all requested objects are\nfiltered out and clients should only use the presence of the continue field to determine whether\nmore results are available. Servers may choose not to support the limit argument and will return\nall of the available results. If limit is specified and the continue field is empty, clients may\nassume that no more results are available. This field is not supported if watch is true.\n\nThe server guarantees that the objects returned when using continue will be identical to issuing\na single list call without a limit - that is, no objects created, modified, or deleted after the\nfirst request is issued will be included in any subsequent continued requests. This is sometimes\nreferred to as a consistent snapshot, and ensures that a client that is using limit to receive\nsmaller chunks of a very large result can ensure they see all possible objects. If objects are\nupdated during a chunked list the version of the object that was present at the time the first list\nresult was calculated is returned.", - "name": "listOptions.limit", - "in": "query" + "description": "DEPRECATED: This field is ignored.", + "name": "name", + "in": "path", + "required": true }, { - "type": "string", - "description": "The continue option should be set when retrieving more results from the server. 
Since this value is\nserver defined, clients may only use the continue value from a previous query result with identical\nquery parameters (except for the value of continue) and the server may reject a continue value it\ndoes not recognize. If the specified continue value is no longer valid whether due to expiration\n(generally five to fifteen minutes) or a configuration change on the server, the server will\nrespond with a 410 ResourceExpired error together with a continue token. If the client needs a\nconsistent list, it must restart their list without the continue field. Otherwise, the client may\nsend another list request with the token received with the 410 error, the server will respond with\na list starting from the next key, but from the latest snapshot, which is inconsistent from the\nprevious list results - objects that are created, modified, or deleted after the first list request\nwill be included in the response, as long as their keys are after the \"next key\".\n\nThis field is not supported when watch is true. 
Clients may start a watch from the last\nresourceVersion value returned by the server and not miss any modifications.", - "name": "listOptions.continue", - "in": "query" + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplateUpdateRequest" + } } ], "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowEventBindingList" + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplate" } }, "default": { @@ -2869,14 +2695,12 @@ } } } - } - }, - "/api/v1/workflow-events/{namespace}": { - "get": { + }, + "delete": { "tags": [ - "WorkflowService" + "WorkflowTemplateService" ], - "operationId": "WorkflowService_WatchWorkflows", + "operationId": "WorkflowTemplateService_DeleteWorkflowTemplate", "parameters": [ { "type": "string", @@ -2886,80 +2710,57 @@ }, { "type": "string", - "description": "A selector to restrict the list of returned objects by their labels.\nDefaults to everything.\n+optional.", - "name": "listOptions.labelSelector", - "in": "query" - }, - { - "type": "string", - "description": "A selector to restrict the list of returned objects by their fields.\nDefaults to everything.\n+optional.", - "name": "listOptions.fieldSelector", - "in": "query" - }, - { - "type": "boolean", - "description": "Watch for changes to the described resources and return them as a stream of\nadd, update, and remove notifications. Specify resourceVersion.\n+optional.", - "name": "listOptions.watch", - "in": "query" - }, - { - "type": "boolean", - "description": "allowWatchBookmarks requests watch events with type \"BOOKMARK\".\nServers that do not implement bookmarks may ignore this flag and\nbookmarks are sent at the server's discretion. 
Clients should not\nassume bookmarks are returned at any specific interval, nor may they\nassume the server will send any BOOKMARK event during a session.\nIf this is not a watch, this field is ignored.\n+optional.", - "name": "listOptions.allowWatchBookmarks", - "in": "query" + "name": "name", + "in": "path", + "required": true }, { "type": "string", - "description": "resourceVersion sets a constraint on what resource versions a request may be served from.\nSee https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for\ndetails.\n\nDefaults to unset\n+optional", - "name": "listOptions.resourceVersion", + "format": "int64", + "description": "The duration in seconds before the object should be deleted. Value must be non-negative integer.\nThe value zero indicates delete immediately. If this value is nil, the default grace period for the\nspecified type will be used.\nDefaults to a per object value if not specified. zero means delete immediately.\n+optional.", + "name": "deleteOptions.gracePeriodSeconds", "in": "query" }, { "type": "string", - "description": "resourceVersionMatch determines how resourceVersion is applied to list calls.\nIt is highly recommended that resourceVersionMatch be set for list calls where\nresourceVersion is set\nSee https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for\ndetails.\n\nDefaults to unset\n+optional", - "name": "listOptions.resourceVersionMatch", + "description": "Specifies the target UID.\n+optional.", + "name": "deleteOptions.preconditions.uid", "in": "query" }, { "type": "string", - "format": "int64", - "description": "Timeout for the list/watch call.\nThis limits the duration of the call, regardless of any activity or inactivity.\n+optional.", - "name": "listOptions.timeoutSeconds", + "description": "Specifies the target ResourceVersion\n+optional.", + "name": "deleteOptions.preconditions.resourceVersion", "in": "query" }, { - "type": "string", - "format": "int64", - 
"description": "limit is a maximum number of responses to return for a list call. If more items exist, the\nserver will set the `continue` field on the list metadata to a value that can be used with the\nsame initial query to retrieve the next set of results. Setting a limit may return fewer than\nthe requested amount of items (up to zero items) in the event all requested objects are\nfiltered out and clients should only use the presence of the continue field to determine whether\nmore results are available. Servers may choose not to support the limit argument and will return\nall of the available results. If limit is specified and the continue field is empty, clients may\nassume that no more results are available. This field is not supported if watch is true.\n\nThe server guarantees that the objects returned when using continue will be identical to issuing\na single list call without a limit - that is, no objects created, modified, or deleted after the\nfirst request is issued will be included in any subsequent continued requests. This is sometimes\nreferred to as a consistent snapshot, and ensures that a client that is using limit to receive\nsmaller chunks of a very large result can ensure they see all possible objects. If objects are\nupdated during a chunked list the version of the object that was present at the time the first list\nresult was calculated is returned.", - "name": "listOptions.limit", + "type": "boolean", + "description": "Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7.\nShould the dependent objects be orphaned. If true/false, the \"orphan\"\nfinalizer will be added to/removed from the object's finalizers list.\nEither this field or PropagationPolicy may be set, but not both.\n+optional.", + "name": "deleteOptions.orphanDependents", "in": "query" }, { "type": "string", - "description": "The continue option should be set when retrieving more results from the server. 
Since this value is\nserver defined, clients may only use the continue value from a previous query result with identical\nquery parameters (except for the value of continue) and the server may reject a continue value it\ndoes not recognize. If the specified continue value is no longer valid whether due to expiration\n(generally five to fifteen minutes) or a configuration change on the server, the server will\nrespond with a 410 ResourceExpired error together with a continue token. If the client needs a\nconsistent list, it must restart their list without the continue field. Otherwise, the client may\nsend another list request with the token received with the 410 error, the server will respond with\na list starting from the next key, but from the latest snapshot, which is inconsistent from the\nprevious list results - objects that are created, modified, or deleted after the first list request\nwill be included in the response, as long as their keys are after the \"next key\".\n\nThis field is not supported when watch is true. Clients may start a watch from the last\nresourceVersion value returned by the server and not miss any modifications.", - "name": "listOptions.continue", + "description": "Whether and how garbage collection will be performed.\nEither this field or OrphanDependents may be set, but not both.\nThe default policy is decided by the existing finalizer set in the\nmetadata.finalizers and the resource-specific default policy.\nAcceptable values are: 'Orphan' - orphan the dependents; 'Background' -\nallow the garbage collector to delete the dependents in the background;\n'Foreground' - a cascading policy that deletes all dependents in the\nforeground.\n+optional.", + "name": "deleteOptions.propagationPolicy", "in": "query" }, { - "type": "string", - "name": "fields", + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "multi", + "description": "When present, indicates that modifications should not be\npersisted. 
An invalid or unrecognized dryRun directive will\nresult in an error response and no further processing of the\nrequest. Valid values are:\n- All: all dry run stages will be processed\n+optional.", + "name": "deleteOptions.dryRun", "in": "query" } ], "responses": { "200": { - "description": "A successful response.(streaming responses)", + "description": "A successful response.", "schema": { - "type": "object", - "title": "Stream result of io.argoproj.workflow.v1alpha1.WorkflowWatchEvent", - "properties": { - "error": { - "$ref": "#/definitions/grpc.gateway.runtime.StreamError" - }, - "result": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowWatchEvent" - } - } + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplateDeleteResponse" } }, "default": { @@ -2971,12 +2772,12 @@ } } }, - "/api/v1/workflow-templates/{namespace}": { + "/api/v1/workflows/{namespace}": { "get": { "tags": [ - "WorkflowTemplateService" + "WorkflowService" ], - "operationId": "WorkflowTemplateService_ListWorkflowTemplates", + "operationId": "WorkflowService_ListWorkflows", "parameters": [ { "type": "string", @@ -3039,13 +2840,19 @@ "description": "The continue option should be set when retrieving more results from the server. Since this value is\nserver defined, clients may only use the continue value from a previous query result with identical\nquery parameters (except for the value of continue) and the server may reject a continue value it\ndoes not recognize. If the specified continue value is no longer valid whether due to expiration\n(generally five to fifteen minutes) or a configuration change on the server, the server will\nrespond with a 410 ResourceExpired error together with a continue token. If the client needs a\nconsistent list, it must restart their list without the continue field. 
Otherwise, the client may\nsend another list request with the token received with the 410 error, the server will respond with\na list starting from the next key, but from the latest snapshot, which is inconsistent from the\nprevious list results - objects that are created, modified, or deleted after the first list request\nwill be included in the response, as long as their keys are after the \"next key\".\n\nThis field is not supported when watch is true. Clients may start a watch from the last\nresourceVersion value returned by the server and not miss any modifications.", "name": "listOptions.continue", "in": "query" + }, + { + "type": "string", + "description": "Fields to be included or excluded in the response. e.g. \"items.spec,items.status.phase\", \"-items.status.nodes\".", + "name": "fields", + "in": "query" } ], "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplateList" + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowList" } }, "default": { @@ -3058,9 +2865,9 @@ }, "post": { "tags": [ - "WorkflowTemplateService" + "WorkflowService" ], - "operationId": "WorkflowTemplateService_CreateWorkflowTemplate", + "operationId": "WorkflowService_CreateWorkflow", "parameters": [ { "type": "string", @@ -3073,7 +2880,7 @@ "in": "body", "required": true, "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplateCreateRequest" + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowCreateRequest" } } ], @@ -3081,7 +2888,7 @@ "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplate" + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Workflow" } }, "default": { @@ -3093,12 +2900,12 @@ } } }, - "/api/v1/workflow-templates/{namespace}/lint": { + "/api/v1/workflows/{namespace}/lint": { "post": { "tags": [ - "WorkflowTemplateService" + "WorkflowService" ], - 
"operationId": "WorkflowTemplateService_LintWorkflowTemplate", + "operationId": "WorkflowService_LintWorkflow", "parameters": [ { "type": "string", @@ -3111,7 +2918,7 @@ "in": "body", "required": true, "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplateLintRequest" + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowLintRequest" } } ], @@ -3119,7 +2926,7 @@ "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplate" + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Workflow" } }, "default": { @@ -3131,12 +2938,12 @@ } } }, - "/api/v1/workflow-templates/{namespace}/{name}": { - "get": { + "/api/v1/workflows/{namespace}/submit": { + "post": { "tags": [ - "WorkflowTemplateService" + "WorkflowService" ], - "operationId": "WorkflowTemplateService_GetWorkflowTemplate", + "operationId": "WorkflowService_SubmitWorkflow", "parameters": [ { "type": "string", @@ -3145,23 +2952,19 @@ "required": true }, { - "type": "string", - "name": "name", - "in": "path", - "required": true - }, - { - "type": "string", - "description": "resourceVersion sets a constraint on what resource versions a request may be served from.\nSee https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for\ndetails.\n\nDefaults to unset\n+optional", - "name": "getOptions.resourceVersion", - "in": "query" + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowSubmitRequest" + } } ], "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplate" + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Workflow" } }, "default": { @@ -3171,12 +2974,14 @@ } } } - }, - "put": { + } + }, + "/api/v1/workflows/{namespace}/{name}": { + "get": { "tags": [ - "WorkflowTemplateService" + "WorkflowService" ], - 
"operationId": "WorkflowTemplateService_UpdateWorkflowTemplate", + "operationId": "WorkflowService_GetWorkflow", "parameters": [ { "type": "string", @@ -3186,25 +2991,28 @@ }, { "type": "string", - "description": "DEPRECATED: This field is ignored.", "name": "name", "in": "path", "required": true }, { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplateUpdateRequest" - } + "type": "string", + "description": "resourceVersion sets a constraint on what resource versions a request may be served from.\nSee https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for\ndetails.\n\nDefaults to unset\n+optional", + "name": "getOptions.resourceVersion", + "in": "query" + }, + { + "type": "string", + "description": "Fields to be included or excluded in the response. e.g. \"spec,status.phase\", \"-status.nodes\".", + "name": "fields", + "in": "query" } ], "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplate" + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Workflow" } }, "default": { @@ -3217,9 +3025,9 @@ }, "delete": { "tags": [ - "WorkflowTemplateService" + "WorkflowService" ], - "operationId": "WorkflowTemplateService_DeleteWorkflowTemplate", + "operationId": "WorkflowService_DeleteWorkflow", "parameters": [ { "type": "string", @@ -3273,13 +3081,18 @@ "description": "When present, indicates that modifications should not be\npersisted. An invalid or unrecognized dryRun directive will\nresult in an error response and no further processing of the\nrequest. 
Valid values are:\n- All: all dry run stages will be processed\n+optional.", "name": "deleteOptions.dryRun", "in": "query" + }, + { + "type": "boolean", + "name": "force", + "in": "query" } ], "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTemplateDeleteResponse" + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowDeleteResponse" } }, "default": { @@ -3291,12 +3104,12 @@ } } }, - "/api/v1/workflows/{namespace}": { + "/api/v1/workflows/{namespace}/{name}/log": { "get": { "tags": [ "WorkflowService" ], - "operationId": "WorkflowService_ListWorkflows", + "operationId": "WorkflowService_WorkflowLogs", "parameters": [ { "type": "string", @@ -3306,72 +3119,105 @@ }, { "type": "string", - "description": "A selector to restrict the list of returned objects by their labels.\nDefaults to everything.\n+optional.", - "name": "listOptions.labelSelector", + "name": "name", + "in": "path", + "required": true + }, + { + "type": "string", + "name": "podName", "in": "query" }, { "type": "string", - "description": "A selector to restrict the list of returned objects by their fields.\nDefaults to everything.\n+optional.", - "name": "listOptions.fieldSelector", + "description": "The container for which to stream logs. Defaults to only container if there is one container in the pod.\n+optional.", + "name": "logOptions.container", "in": "query" }, { "type": "boolean", - "description": "Watch for changes to the described resources and return them as a stream of\nadd, update, and remove notifications. Specify resourceVersion.\n+optional.", - "name": "listOptions.watch", + "description": "Follow the log stream of the pod. 
Defaults to false.\n+optional.", + "name": "logOptions.follow", "in": "query" }, { "type": "boolean", - "description": "allowWatchBookmarks requests watch events with type \"BOOKMARK\".\nServers that do not implement bookmarks may ignore this flag and\nbookmarks are sent at the server's discretion. Clients should not\nassume bookmarks are returned at any specific interval, nor may they\nassume the server will send any BOOKMARK event during a session.\nIf this is not a watch, this field is ignored.\n+optional.", - "name": "listOptions.allowWatchBookmarks", + "description": "Return previous terminated container logs. Defaults to false.\n+optional.", + "name": "logOptions.previous", "in": "query" }, { "type": "string", - "description": "resourceVersion sets a constraint on what resource versions a request may be served from.\nSee https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for\ndetails.\n\nDefaults to unset\n+optional", - "name": "listOptions.resourceVersion", + "format": "int64", + "description": "A relative time in seconds before the current time from which to show logs. If this value\nprecedes the time a pod was started, only logs since the pod start will be returned.\nIf this value is in the future, no logs will be returned.\nOnly one of sinceSeconds or sinceTime may be specified.\n+optional.", + "name": "logOptions.sinceSeconds", "in": "query" }, { "type": "string", - "description": "resourceVersionMatch determines how resourceVersion is applied to list calls.\nIt is highly recommended that resourceVersionMatch be set for list calls where\nresourceVersion is set\nSee https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for\ndetails.\n\nDefaults to unset\n+optional", - "name": "listOptions.resourceVersionMatch", + "format": "int64", + "description": "Represents seconds of UTC time since Unix epoch\n1970-01-01T00:00:00Z. 
Must be from 0001-01-01T00:00:00Z to\n9999-12-31T23:59:59Z inclusive.", + "name": "logOptions.sinceTime.seconds", + "in": "query" + }, + { + "type": "integer", + "format": "int32", + "description": "Non-negative fractions of a second at nanosecond resolution. Negative\nsecond values with fractions must still have non-negative nanos values\nthat count forward in time. Must be from 0 to 999,999,999\ninclusive. This field may be limited in precision depending on context.", + "name": "logOptions.sinceTime.nanos", + "in": "query" + }, + { + "type": "boolean", + "description": "If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line\nof log output. Defaults to false.\n+optional.", + "name": "logOptions.timestamps", "in": "query" }, { "type": "string", "format": "int64", - "description": "Timeout for the list/watch call.\nThis limits the duration of the call, regardless of any activity or inactivity.\n+optional.", - "name": "listOptions.timeoutSeconds", + "description": "If set, the number of lines from the end of the logs to show. If not specified,\nlogs are shown from the creation of the container or sinceSeconds or sinceTime\n+optional.", + "name": "logOptions.tailLines", "in": "query" }, { "type": "string", "format": "int64", - "description": "limit is a maximum number of responses to return for a list call. If more items exist, the\nserver will set the `continue` field on the list metadata to a value that can be used with the\nsame initial query to retrieve the next set of results. Setting a limit may return fewer than\nthe requested amount of items (up to zero items) in the event all requested objects are\nfiltered out and clients should only use the presence of the continue field to determine whether\nmore results are available. Servers may choose not to support the limit argument and will return\nall of the available results. If limit is specified and the continue field is empty, clients may\nassume that no more results are available. 
This field is not supported if watch is true.\n\nThe server guarantees that the objects returned when using continue will be identical to issuing\na single list call without a limit - that is, no objects created, modified, or deleted after the\nfirst request is issued will be included in any subsequent continued requests. This is sometimes\nreferred to as a consistent snapshot, and ensures that a client that is using limit to receive\nsmaller chunks of a very large result can ensure they see all possible objects. If objects are\nupdated during a chunked list the version of the object that was present at the time the first list\nresult was calculated is returned.", - "name": "listOptions.limit", + "description": "If set, the number of bytes to read from the server before terminating the\nlog output. This may not display a complete final line of logging, and may return\nslightly more or slightly less than the specified limit.\n+optional.", + "name": "logOptions.limitBytes", + "in": "query" + }, + { + "type": "boolean", + "description": "insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the\nserving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver\nand the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real\nkubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the\nconnection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept\nthe actual log data coming from the real kubelet).\n+optional.", + "name": "logOptions.insecureSkipTLSVerifyBackend", "in": "query" }, { "type": "string", - "description": "The continue option should be set when retrieving more results from the server. 
Since this value is\nserver defined, clients may only use the continue value from a previous query result with identical\nquery parameters (except for the value of continue) and the server may reject a continue value it\ndoes not recognize. If the specified continue value is no longer valid whether due to expiration\n(generally five to fifteen minutes) or a configuration change on the server, the server will\nrespond with a 410 ResourceExpired error together with a continue token. If the client needs a\nconsistent list, it must restart their list without the continue field. Otherwise, the client may\nsend another list request with the token received with the 410 error, the server will respond with\na list starting from the next key, but from the latest snapshot, which is inconsistent from the\nprevious list results - objects that are created, modified, or deleted after the first list request\nwill be included in the response, as long as their keys are after the \"next key\".\n\nThis field is not supported when watch is true. Clients may start a watch from the last\nresourceVersion value returned by the server and not miss any modifications.", - "name": "listOptions.continue", + "name": "grep", "in": "query" }, { "type": "string", - "description": "Fields to be included or excluded in the response. e.g. 
\"items.spec,items.status.phase\", \"-items.status.nodes\".", - "name": "fields", + "name": "selector", "in": "query" } ], "responses": { "200": { - "description": "A successful response.", + "description": "A successful response.(streaming responses)", "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowList" + "type": "object", + "title": "Stream result of io.argoproj.workflow.v1alpha1.LogEntry", + "properties": { + "error": { + "$ref": "#/definitions/grpc.gateway.runtime.StreamError" + }, + "result": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.LogEntry" + } + } } }, "default": { @@ -3381,12 +3227,14 @@ } } } - }, - "post": { + } + }, + "/api/v1/workflows/{namespace}/{name}/resubmit": { + "put": { "tags": [ "WorkflowService" ], - "operationId": "WorkflowService_CreateWorkflow", + "operationId": "WorkflowService_ResubmitWorkflow", "parameters": [ { "type": "string", @@ -3394,12 +3242,18 @@ "in": "path", "required": true }, + { + "type": "string", + "name": "name", + "in": "path", + "required": true + }, { "name": "body", "in": "body", "required": true, "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowCreateRequest" + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowResubmitRequest" } } ], @@ -3419,12 +3273,12 @@ } } }, - "/api/v1/workflows/{namespace}/lint": { - "post": { + "/api/v1/workflows/{namespace}/{name}/resume": { + "put": { "tags": [ "WorkflowService" ], - "operationId": "WorkflowService_LintWorkflow", + "operationId": "WorkflowService_ResumeWorkflow", "parameters": [ { "type": "string", @@ -3432,12 +3286,18 @@ "in": "path", "required": true }, + { + "type": "string", + "name": "name", + "in": "path", + "required": true + }, { "name": "body", "in": "body", "required": true, "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowLintRequest" + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowResumeRequest" } } ], @@ -3457,12 +3317,12 @@ } } }, - 
"/api/v1/workflows/{namespace}/submit": { - "post": { + "/api/v1/workflows/{namespace}/{name}/retry": { + "put": { "tags": [ "WorkflowService" ], - "operationId": "WorkflowService_SubmitWorkflow", + "operationId": "WorkflowService_RetryWorkflow", "parameters": [ { "type": "string", @@ -3470,12 +3330,18 @@ "in": "path", "required": true }, + { + "type": "string", + "name": "name", + "in": "path", + "required": true + }, { "name": "body", "in": "body", "required": true, "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowSubmitRequest" + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowRetryRequest" } } ], @@ -3495,12 +3361,12 @@ } } }, - "/api/v1/workflows/{namespace}/{name}": { - "get": { + "/api/v1/workflows/{namespace}/{name}/set": { + "put": { "tags": [ "WorkflowService" ], - "operationId": "WorkflowService_GetWorkflow", + "operationId": "WorkflowService_SetWorkflow", "parameters": [ { "type": "string", @@ -3515,16 +3381,12 @@ "required": true }, { - "type": "string", - "description": "resourceVersion sets a constraint on what resource versions a request may be served from.\nSee https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for\ndetails.\n\nDefaults to unset\n+optional", - "name": "getOptions.resourceVersion", - "in": "query" - }, - { - "type": "string", - "description": "Fields to be included or excluded in the response. e.g. 
\"spec,status.phase\", \"-status.nodes\".", - "name": "fields", - "in": "query" + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowSetRequest" + } } ], "responses": { @@ -3541,12 +3403,14 @@ } } } - }, - "delete": { + } + }, + "/api/v1/workflows/{namespace}/{name}/stop": { + "put": { "tags": [ "WorkflowService" ], - "operationId": "WorkflowService_DeleteWorkflow", + "operationId": "WorkflowService_StopWorkflow", "parameters": [ { "type": "string", @@ -3561,52 +3425,63 @@ "required": true }, { - "type": "string", - "format": "int64", - "description": "The duration in seconds before the object should be deleted. Value must be non-negative integer.\nThe value zero indicates delete immediately. If this value is nil, the default grace period for the\nspecified type will be used.\nDefaults to a per object value if not specified. zero means delete immediately.\n+optional.", - "name": "deleteOptions.gracePeriodSeconds", - "in": "query" - }, - { - "type": "string", - "description": "Specifies the target UID.\n+optional.", - "name": "deleteOptions.preconditions.uid", - "in": "query" + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowStopRequest" + } + } + ], + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Workflow" + } }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/grpc.gateway.runtime.Error" + } + } + } + } + }, + "/api/v1/workflows/{namespace}/{name}/suspend": { + "put": { + "tags": [ + "WorkflowService" + ], + "operationId": "WorkflowService_SuspendWorkflow", + "parameters": [ { "type": "string", - "description": "Specifies the target ResourceVersion\n+optional.", - "name": "deleteOptions.preconditions.resourceVersion", - "in": "query" - }, - { - "type": "boolean", 
- "description": "Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7.\nShould the dependent objects be orphaned. If true/false, the \"orphan\"\nfinalizer will be added to/removed from the object's finalizers list.\nEither this field or PropagationPolicy may be set, but not both.\n+optional.", - "name": "deleteOptions.orphanDependents", - "in": "query" + "name": "namespace", + "in": "path", + "required": true }, { "type": "string", - "description": "Whether and how garbage collection will be performed.\nEither this field or OrphanDependents may be set, but not both.\nThe default policy is decided by the existing finalizer set in the\nmetadata.finalizers and the resource-specific default policy.\nAcceptable values are: 'Orphan' - orphan the dependents; 'Background' -\nallow the garbage collector to delete the dependents in the background;\n'Foreground' - a cascading policy that deletes all dependents in the\nforeground.\n+optional.", - "name": "deleteOptions.propagationPolicy", - "in": "query" + "name": "name", + "in": "path", + "required": true }, { - "type": "array", - "items": { - "type": "string" - }, - "collectionFormat": "multi", - "description": "When present, indicates that modifications should not be\npersisted. An invalid or unrecognized dryRun directive will\nresult in an error response and no further processing of the\nrequest. 
Valid values are:\n- All: all dry run stages will be processed\n+optional.", - "name": "deleteOptions.dryRun", - "in": "query" + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowSuspendRequest" + } } ], "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowDeleteResponse" + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Workflow" } }, "default": { @@ -3618,12 +3493,12 @@ } } }, - "/api/v1/workflows/{namespace}/{name}/log": { - "get": { + "/api/v1/workflows/{namespace}/{name}/terminate": { + "put": { "tags": [ "WorkflowService" ], - "operationId": "WorkflowService_WorkflowLogs", + "operationId": "WorkflowService_TerminateWorkflow", "parameters": [ { "type": "string", @@ -3638,17 +3513,63 @@ "required": true }, { - "type": "string", - "name": "podName", - "in": "query" - }, - { - "type": "string", - "description": "The container for which to stream logs. Defaults to only container if there is one container in the pod.\n+optional.", - "name": "logOptions.container", - "in": "query" + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTerminateRequest" + } + } + ], + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Workflow" + } }, - { + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/grpc.gateway.runtime.Error" + } + } + } + } + }, + "/api/v1/workflows/{namespace}/{name}/{podName}/log": { + "get": { + "tags": [ + "WorkflowService" + ], + "summary": "DEPRECATED: Cannot work via HTTP if podName is an empty string. 
Use WorkflowLogs.", + "operationId": "WorkflowService_PodLogs", + "parameters": [ + { + "type": "string", + "name": "namespace", + "in": "path", + "required": true + }, + { + "type": "string", + "name": "name", + "in": "path", + "required": true + }, + { + "type": "string", + "name": "podName", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "The container for which to stream logs. Defaults to only container if there is one container in the pod.\n+optional.", + "name": "logOptions.container", + "in": "query" + }, + { "type": "boolean", "description": "Follow the log stream of the pod. Defaults to false.\n+optional.", "name": "logOptions.follow", @@ -3743,12 +3664,13 @@ } } }, - "/api/v1/workflows/{namespace}/{name}/resubmit": { - "put": { + "/artifact-files/{namespace}/{idDiscriminator}/{id}/{nodeId}/{artifactDiscriminator}/{artifactName}": { + "get": { "tags": [ - "WorkflowService" + "ArtifactService" ], - "operationId": "WorkflowService_ResubmitWorkflow", + "summary": "Get an artifact.", + "operationId": "ArtifactService_GetArtifactFile", "parameters": [ { "type": "string", @@ -3757,113 +3679,49 @@ "required": true }, { + "enum": [ + "workflow", + "archived-workflows " + ], "type": "string", - "name": "name", + "name": "idDiscriminator", "in": "path", "required": true }, - { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowResubmitRequest" - } - } - ], - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Workflow" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/grpc.gateway.runtime.Error" - } - } - } - } - }, - "/api/v1/workflows/{namespace}/{name}/resume": { - "put": { - "tags": [ - "WorkflowService" - ], - "operationId": "WorkflowService_ResumeWorkflow", - "parameters": [ { "type": "string", - 
"name": "namespace", + "name": "id", "in": "path", "required": true }, { "type": "string", - "name": "name", + "name": "nodeId", "in": "path", "required": true }, - { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowResumeRequest" - } - } - ], - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Workflow" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/grpc.gateway.runtime.Error" - } - } - } - } - }, - "/api/v1/workflows/{namespace}/{name}/retry": { - "put": { - "tags": [ - "WorkflowService" - ], - "operationId": "WorkflowService_RetryWorkflow", - "parameters": [ { "type": "string", - "name": "namespace", + "name": "artifactName", "in": "path", "required": true }, { + "enum": [ + "outputs" + ], "type": "string", - "name": "name", + "name": "artifactDiscriminator", "in": "path", "required": true - }, - { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowRetryRequest" - } } ], "responses": { "200": { - "description": "A successful response.", + "description": "An artifact file.", "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Workflow" + "type": "string", + "format": "binary" } }, "default": { @@ -3875,39 +3733,39 @@ } } }, - "/api/v1/workflows/{namespace}/{name}/set": { - "put": { + "/artifacts-by-uid/{uid}/{nodeId}/{artifactName}": { + "get": { "tags": [ - "WorkflowService" + "ArtifactService" ], - "operationId": "WorkflowService_SetWorkflow", + "summary": "Get an output artifact by UID.", + "operationId": "ArtifactService_GetOutputArtifactByUID", "parameters": [ { "type": "string", - "name": "namespace", + "name": "uid", "in": "path", "required": true }, { "type": "string", - "name": "name", + "name": "nodeId", "in": "path", 
"required": true }, { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowSetRequest" - } + "type": "string", + "name": "artifactName", + "in": "path", + "required": true } ], "responses": { "200": { - "description": "A successful response.", + "description": "An artifact file.", "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Workflow" + "type": "string", + "format": "binary" } }, "default": { @@ -3919,12 +3777,13 @@ } } }, - "/api/v1/workflows/{namespace}/{name}/stop": { - "put": { + "/artifacts/{namespace}/{name}/{nodeId}/{artifactName}": { + "get": { "tags": [ - "WorkflowService" + "ArtifactService" ], - "operationId": "WorkflowService_StopWorkflow", + "summary": "Get an output artifact.", + "operationId": "ArtifactService_GetOutputArtifact", "parameters": [ { "type": "string", @@ -3938,64 +3797,25 @@ "in": "path", "required": true }, - { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowStopRequest" - } - } - ], - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Workflow" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/grpc.gateway.runtime.Error" - } - } - } - } - }, - "/api/v1/workflows/{namespace}/{name}/suspend": { - "put": { - "tags": [ - "WorkflowService" - ], - "operationId": "WorkflowService_SuspendWorkflow", - "parameters": [ { "type": "string", - "name": "namespace", + "name": "nodeId", "in": "path", "required": true }, { "type": "string", - "name": "name", + "name": "artifactName", "in": "path", "required": true - }, - { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowSuspendRequest" - } } ], "responses": { "200": { - "description": "A 
successful response.", + "description": "An artifact file.", "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Workflow" + "type": "string", + "format": "binary" } }, "default": { @@ -4007,39 +3827,39 @@ } } }, - "/api/v1/workflows/{namespace}/{name}/terminate": { - "put": { + "/input-artifacts-by-uid/{uid}/{nodeId}/{artifactName}": { + "get": { "tags": [ - "WorkflowService" + "ArtifactService" ], - "operationId": "WorkflowService_TerminateWorkflow", + "summary": "Get an input artifact by UID.", + "operationId": "ArtifactService_GetInputArtifactByUID", "parameters": [ { "type": "string", - "name": "namespace", + "name": "uid", "in": "path", "required": true }, { "type": "string", - "name": "name", + "name": "nodeId", "in": "path", "required": true }, { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowTerminateRequest" - } + "type": "string", + "name": "artifactName", + "in": "path", + "required": true } ], "responses": { "200": { - "description": "A successful response.", + "description": "An artifact file.", "schema": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Workflow" + "type": "string", + "format": "binary" } }, "default": { @@ -4051,13 +3871,13 @@ } } }, - "/api/v1/workflows/{namespace}/{name}/{podName}/log": { + "/input-artifacts/{namespace}/{name}/{nodeId}/{artifactName}": { "get": { "tags": [ - "WorkflowService" + "ArtifactService" ], - "summary": "DEPRECATED: Cannot work via HTTP if podName is an empty string. Use WorkflowLogs.", - "operationId": "WorkflowService_PodLogs", + "summary": "Get an input artifact.", + "operationId": "ArtifactService_GetInputArtifact", "parameters": [ { "type": "string", @@ -4073,1517 +3893,101 @@ }, { "type": "string", - "name": "podName", + "name": "nodeId", "in": "path", "required": true }, { "type": "string", - "description": "The container for which to stream logs. 
Defaults to only container if there is one container in the pod.\n+optional.", - "name": "logOptions.container", - "in": "query" - }, - { - "type": "boolean", - "description": "Follow the log stream of the pod. Defaults to false.\n+optional.", - "name": "logOptions.follow", - "in": "query" + "name": "artifactName", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "An artifact file.", + "schema": { + "type": "string", + "format": "binary" + } }, - { - "type": "boolean", - "description": "Return previous terminated container logs. Defaults to false.\n+optional.", - "name": "logOptions.previous", - "in": "query" - }, - { - "type": "string", - "format": "int64", - "description": "A relative time in seconds before the current time from which to show logs. If this value\nprecedes the time a pod was started, only logs since the pod start will be returned.\nIf this value is in the future, no logs will be returned.\nOnly one of sinceSeconds or sinceTime may be specified.\n+optional.", - "name": "logOptions.sinceSeconds", - "in": "query" - }, - { - "type": "string", - "format": "int64", - "description": "Represents seconds of UTC time since Unix epoch\n1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to\n9999-12-31T23:59:59Z inclusive.", - "name": "logOptions.sinceTime.seconds", - "in": "query" - }, - { - "type": "integer", - "format": "int32", - "description": "Non-negative fractions of a second at nanosecond resolution. Negative\nsecond values with fractions must still have non-negative nanos values\nthat count forward in time. Must be from 0 to 999,999,999\ninclusive. This field may be limited in precision depending on context.", - "name": "logOptions.sinceTime.nanos", - "in": "query" - }, - { - "type": "boolean", - "description": "If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line\nof log output. 
Defaults to false.\n+optional.", - "name": "logOptions.timestamps", - "in": "query" - }, - { - "type": "string", - "format": "int64", - "description": "If set, the number of lines from the end of the logs to show. If not specified,\nlogs are shown from the creation of the container or sinceSeconds or sinceTime\n+optional.", - "name": "logOptions.tailLines", - "in": "query" - }, - { - "type": "string", - "format": "int64", - "description": "If set, the number of bytes to read from the server before terminating the\nlog output. This may not display a complete final line of logging, and may return\nslightly more or slightly less than the specified limit.\n+optional.", - "name": "logOptions.limitBytes", - "in": "query" - }, - { - "type": "boolean", - "description": "insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the\nserving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver\nand the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real\nkubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the\nconnection to the real kubelet is vulnerable to a man in the middle attack (e.g. 
an attacker could not intercept\nthe actual log data coming from the real kubelet).\n+optional.", - "name": "logOptions.insecureSkipTLSVerifyBackend", - "in": "query" - }, - { - "type": "string", - "name": "grep", - "in": "query" - }, - { - "type": "string", - "name": "selector", - "in": "query" - } - ], - "responses": { - "200": { - "description": "A successful response.(streaming responses)", - "schema": { - "type": "object", - "title": "Stream result of io.argoproj.workflow.v1alpha1.LogEntry", - "properties": { - "error": { - "$ref": "#/definitions/grpc.gateway.runtime.StreamError" - }, - "result": { - "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.LogEntry" - } - } - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/grpc.gateway.runtime.Error" - } - } - } - } - }, - "/artifacts-by-uid/{uid}/{podName}/{artifactName}": { - "get": { - "tags": [ - "ArtifactService" - ], - "summary": "Get an output artifact by UID.", - "operationId": "ArtifactService_GetOutputArtifactByUID", - "parameters": [ - { - "type": "string", - "name": "uid", - "in": "path", - "required": true - }, - { - "type": "string", - "name": "podName", - "in": "path", - "required": true - }, - { - "type": "string", - "name": "artifactName", - "in": "path", - "required": true - } - ], - "responses": { - "200": { - "description": "An artifact file." 
- }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/grpc.gateway.runtime.Error" - } - } - } - } - }, - "/artifacts/{namespace}/{name}/{podName}/{artifactName}": { - "get": { - "tags": [ - "ArtifactService" - ], - "summary": "Get an output artifact.", - "operationId": "ArtifactService_GetOutputArtifact", - "parameters": [ - { - "type": "string", - "name": "namespace", - "in": "path", - "required": true - }, - { - "type": "string", - "name": "name", - "in": "path", - "required": true - }, - { - "type": "string", - "name": "podName", - "in": "path", - "required": true - }, - { - "type": "string", - "name": "artifactName", - "in": "path", - "required": true - } - ], - "responses": { - "200": { - "description": "An artifact file." - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/grpc.gateway.runtime.Error" - } - } - } - } - }, - "/input-artifacts-by-uid/{uid}/{podName}/{artifactName}": { - "get": { - "tags": [ - "ArtifactService" - ], - "summary": "Get an input artifact by UID.", - "operationId": "ArtifactService_GetInputArtifactByUID", - "parameters": [ - { - "type": "string", - "name": "namespace", - "in": "path", - "required": true - }, - { - "type": "string", - "name": "uid", - "in": "path", - "required": true - }, - { - "type": "string", - "name": "podName", - "in": "path", - "required": true - }, - { - "type": "string", - "name": "artifactName", - "in": "path", - "required": true - } - ], - "responses": { - "200": { - "description": "An artifact file." 
- }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/grpc.gateway.runtime.Error" - } - } - } - } - }, - "/input-artifacts/{namespace}/{name}/{podName}/{artifactName}": { - "get": { - "tags": [ - "ArtifactService" - ], - "summary": "Get an input artifact.", - "operationId": "ArtifactService_GetInputArtifact", - "parameters": [ - { - "type": "string", - "name": "namespace", - "in": "path", - "required": true - }, - { - "type": "string", - "name": "name", - "in": "path", - "required": true - }, - { - "type": "string", - "name": "podName", - "in": "path", - "required": true - }, - { - "type": "string", - "name": "artifactName", - "in": "path", - "required": true - } - ], - "responses": { - "200": { - "description": "An artifact file." - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/grpc.gateway.runtime.Error" - } - } - } - } - } - }, - "definitions": { - "eventsource.CreateEventSourceRequest": { - "type": "object", - "properties": { - "eventSource": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSource" - }, - "namespace": { - "type": "string" - } - } - }, - "eventsource.EventSourceDeletedResponse": { - "type": "object" - }, - "eventsource.EventSourceWatchEvent": { - "type": "object", - "properties": { - "object": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSource" - }, - "type": { - "type": "string" - } - } - }, - "eventsource.LogEntry": { - "type": "object", - "title": "structured log entry", - "properties": { - "eventName": { - "type": "string", - "title": "optional - the event name (e.g. `example`)" - }, - "eventSourceName": { - "type": "string" - }, - "eventSourceType": { - "type": "string", - "title": "optional - the event source type (e.g. 
`webhook`)" - }, - "level": { - "type": "string" - }, - "msg": { - "type": "string" - }, - "namespace": { - "type": "string" - }, - "time": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Time" - } - } - }, - "eventsource.UpdateEventSourceRequest": { - "type": "object", - "properties": { - "eventSource": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSource" - }, - "name": { - "type": "string" - }, - "namespace": { - "type": "string" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AWSCredentials": { - "type": "object", - "properties": { - "accessKeyId": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - }, - "secretAccessKey": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - }, - "sessionToken": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AWSEndpoint": { - "type": "object", - "properties": { - "url": { - "type": "string" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractStep": { - "type": "object", - "properties": { - "resources": { - "title": "+kubebuilder:default={limits: {\"cpu\": \"500m\", \"memory\": \"256Mi\"}, requests: {\"cpu\": \"100m\", \"memory\": \"64Mi\"}}", - "$ref": "#/definitions/io.k8s.api.core.v1.ResourceRequirements" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractVolumeSource": { - "type": "object", - "properties": { - "awsElasticBlockStore": { - "title": "AWSElasticBlockStore represents an AWS Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.AWSElasticBlockStoreVolumeSource" - }, - "azureDisk": { - "title": "AzureDisk represents an Azure Data Disk mount on the host and bind mount to the pod.\n+optional", - "$ref": 
"#/definitions/io.k8s.api.core.v1.AzureDiskVolumeSource" - }, - "azureFile": { - "title": "AzureFile represents an Azure File Service mount on the host and bind mount to the pod.\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.AzureFileVolumeSource" - }, - "cephfs": { - "title": "CephFS represents a Ceph FS mount on the host that shares a pod's lifetime\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.CephFSVolumeSource" - }, - "cinder": { - "title": "Cinder represents a cinder volume attached and mounted on kubelets host machine.\nMore info: https://examples.io.k8s.mysql-cinder-pd/README.md\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.CinderVolumeSource" - }, - "configMap": { - "title": "ConfigMap represents a configMap that should populate this volume\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.ConfigMapVolumeSource" - }, - "csi": { - "title": "CSI (Container Storage Interface) represents ephemeral storage that is handled by certain external CSI drivers (Beta feature).\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.CSIVolumeSource" - }, - "downwardAPI": { - "title": "DownwardAPI represents downward API about the pod that should populate this volume\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.DownwardAPIVolumeSource" - }, - "emptyDir": { - "title": "EmptyDir represents a temporary directory that shares a pod's lifetime.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.EmptyDirVolumeSource" - }, - "ephemeral": { - "description": "Ephemeral represents a volume that is handled by a cluster storage driver.\nThe volume's lifecycle is tied to the pod that defines it - it will be created before the pod starts,\nand deleted when the pod is removed.\n\nUse this if:\na) the volume is only needed while the pod runs,\nb) features of normal volumes like restoring from snapshot or capacity\n tracking are needed,\nc) the storage driver 
is specified through a storage class, and\nd) the storage driver supports dynamic volume provisioning through\n a PersistentVolumeClaim (see EphemeralVolumeSource for more\n information on the connection between this volume type\n and PersistentVolumeClaim).\n\nUse PersistentVolumeClaim or one of the vendor-specific\nAPIs for volumes that persist for longer than the lifecycle\nof an individual pod.\n\nUse CSI for light-weight local ephemeral volumes if the CSI driver is meant to\nbe used that way - see the documentation of the driver for\nmore information.\n\nA pod can use both types of ephemeral volumes and\npersistent volumes at the same time.\n\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.EphemeralVolumeSource" - }, - "fc": { - "title": "FC represents a Fibre Channel resource that is attached to a kubelet's host machine and then exposed to the pod.\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.FCVolumeSource" - }, - "flexVolume": { - "title": "FlexVolume represents a generic volume resource that is\nprovisioned/attached using an exec based plugin.\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.FlexVolumeSource" - }, - "flocker": { - "title": "Flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.FlockerVolumeSource" - }, - "gcePersistentDisk": { - "title": "GCEPersistentDisk represents a GCE Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.GCEPersistentDiskVolumeSource" - }, - "gitRepo": { - "title": "GitRepo represents a git repository at a particular revision.\nDEPRECATED: GitRepo is deprecated. 
To provision a container with a git repo, mount an\nEmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir\ninto the Pod's container.\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.GitRepoVolumeSource" - }, - "glusterfs": { - "title": "Glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime.\nMore info: https://examples.io.k8s.volumes/glusterfs/README.md\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.GlusterfsVolumeSource" - }, - "hostPath": { - "title": "HostPath represents a pre-existing file or directory on the host\nmachine that is directly exposed to the container. This is generally\nused for system agents or other privileged things that are allowed\nto see the host machine. Most containers will NOT need this.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath\n---\nTODO(jonesdl) We need to restrict who can use host directory mounts and who can/can not\nmount host directories as read/write.\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.HostPathVolumeSource" - }, - "iscsi": { - "title": "ISCSI represents an ISCSI Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nMore info: https://examples.io.k8s.volumes/iscsi/README.md\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.ISCSIVolumeSource" - }, - "nfs": { - "title": "NFS represents an NFS mount on the host that shares a pod's lifetime\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#nfs\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.NFSVolumeSource" - }, - "persistentVolumeClaim": { - "title": "PersistentVolumeClaimVolumeSource represents a reference to a\nPersistentVolumeClaim in the same namespace.\nMore info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.PersistentVolumeClaimVolumeSource" - }, - "photonPersistentDisk": { - "title": 
"PhotonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine", - "$ref": "#/definitions/io.k8s.api.core.v1.PhotonPersistentDiskVolumeSource" - }, - "portworxVolume": { - "title": "PortworxVolume represents a portworx volume attached and mounted on kubelets host machine\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.PortworxVolumeSource" - }, - "projected": { - "title": "Items for all in one resources secrets, configmaps, and downward API", - "$ref": "#/definitions/io.k8s.api.core.v1.ProjectedVolumeSource" - }, - "quobyte": { - "title": "Quobyte represents a Quobyte mount on the host that shares a pod's lifetime\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.QuobyteVolumeSource" - }, - "rbd": { - "title": "RBD represents a Rados Block Device mount on the host that shares a pod's lifetime.\nMore info: https://examples.io.k8s.volumes/rbd/README.md\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.RBDVolumeSource" - }, - "scaleIO": { - "title": "ScaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes.\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.ScaleIOVolumeSource" - }, - "secret": { - "title": "Secret represents a secret that should populate this volume.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#secret\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.SecretVolumeSource" - }, - "storageos": { - "title": "StorageOS represents a StorageOS volume attached and mounted on Kubernetes nodes.\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.StorageOSVolumeSource" - }, - "vsphereVolume": { - "title": "VsphereVolume represents a vSphere volume attached and mounted on kubelets host machine\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.VsphereVirtualDiskVolumeSource" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Backoff": { - "type": "object", - "properties": { - "FactorPercentage": { - 
"type": "integer", - "title": "+kubebuilder:default=200" - }, - "cap": { - "title": "+kubebuilder:default=\"0ms\"", - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration" - }, - "duration": { - "title": "+kubebuilder:default=\"100ms\"", - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration" - }, - "jitterPercentage": { - "type": "integer", - "title": "the amount of jitter per step, typically 10-20%, \u003e100% is valid, but strange\n+kubebuilder:default=10" - }, - "steps": { - "type": "string", - "format": "uint64", - "title": "the number of backoff steps, zero means no retries\n+kubebuilder:default=20" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Cat": { - "type": "object", - "properties": { - "abstractStep": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractStep" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Code": { - "type": "object", - "properties": { - "image": { - "description": "Image is used in preference to Runtime.", - "type": "string" - }, - "runtime": { - "type": "string" - }, - "source": { - "type": "string" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Container": { - "type": "object", - "properties": { - "args": { - "type": "array", - "items": { - "type": "string" - } - }, - "command": { - "type": "array", - "items": { - "type": "string" - } - }, - "env": { - "type": "array", - "items": { - "$ref": "#/definitions/io.k8s.api.core.v1.EnvVar" - } - }, - "image": { - "type": "string" - }, - "in": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Interface" - }, - "resources": { - "$ref": "#/definitions/io.k8s.api.core.v1.ResourceRequirements" - }, - "volumeMounts": { - "type": "array", - "items": { - "$ref": "#/definitions/io.k8s.api.core.v1.VolumeMount" - } - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Cron": { - "type": "object", - "properties": { - "layout": { - 
"type": "string", - "title": "+kubebuilder:default=\"2006-01-02T15:04:05Z07:00\"" - }, - "schedule": { - "type": "string" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.DBDataSource": { - "type": "object", - "properties": { - "value": { - "type": "string" - }, - "valueFrom": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.DBDataSourceFrom" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.DBDataSourceFrom": { - "type": "object", - "properties": { - "secretKeyRef": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.DBSink": { - "type": "object", - "properties": { - "actions": { - "type": "array", - "items": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.SQLAction" - } - }, - "database": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Database" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.DBSource": { - "type": "object", - "properties": { - "commitInterval": { - "title": "+kubebuilder:default=\"5s\"", - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration" - }, - "database": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Database" - }, - "initSchema": { - "type": "boolean", - "title": "+kubebuilder:default=true" - }, - "offsetColumn": { - "type": "string" - }, - "pollInterval": { - "title": "+kubebuilder:default=\"1s\"", - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration" - }, - "query": { - "type": "string" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Database": { - "type": "object", - "properties": { - "dataSource": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.DBDataSource" - }, - "driver": { - "type": "string", - "title": "+kubebuilder:default=default" - } - } - }, - 
"github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Dedupe": { - "type": "object", - "properties": { - "abstractStep": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractStep" - }, - "maxSize": { - "title": "MaxSize is the maximum number of entries to keep in the in-memory database used to store recent UIDs.\nLarger number mean bigger windows of time for dedupe, but greater memory usage.\n+kubebuilder:default=\"1M\"", - "$ref": "#/definitions/io.k8s.apimachinery.pkg.api.resource.Quantity" - }, - "uid": { - "type": "string", - "title": "+kubebuilder:default=\"sha1(msg)\"" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Expand": { - "type": "object", - "properties": { - "abstractStep": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractStep" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Filter": { - "type": "object", - "properties": { - "abstractStep": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractStep" - }, - "expression": { - "type": "string" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Flatten": { - "type": "object", - "properties": { - "abstractStep": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractStep" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Git": { - "type": "object", - "properties": { - "branch": { - "type": "string", - "title": "+kubebuilder:default=main" - }, - "command": { - "type": "array", - "items": { - "type": "string" - } - }, - "env": { - "type": "array", - "items": { - "$ref": "#/definitions/io.k8s.api.core.v1.EnvVar" - } - }, - "image": { - "type": "string" - }, - "insecureIgnoreHostKey": { - "type": "boolean", - "title": "InsecureIgnoreHostKey is the bool value for ignoring check for host key" - }, - "passwordSecret": { - "title": "PasswordSecret is the secret selector to the repository password", - 
"$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - }, - "path": { - "description": "+kubebuilder:default=.", - "type": "string" - }, - "sshPrivateKeySecret": { - "title": "SSHPrivateKeySecret is the secret selector to the repository ssh private key", - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - }, - "url": { - "type": "string" - }, - "usernameSecret": { - "title": "UsernameSecret is the secret selector to the repository username", - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Group": { - "type": "object", - "properties": { - "endOfGroup": { - "type": "string" - }, - "format": { - "type": "string" - }, - "key": { - "type": "string" - }, - "storage": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Storage" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTP": { - "type": "object" - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTPHeader": { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - }, - "valueFrom": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTPHeaderSource" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTPHeaderSource": { - "type": "object", - "properties": { - "secretKeyRef": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTPSink": { - "type": "object", - "properties": { - "headers": { - "type": "array", - "items": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTPHeader" - } - }, - "insecureSkipVerify": { - "type": "boolean" - }, - "url": { - "type": "string" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTPSource": { - "type": "object", - "properties": { - "serviceName": { - "type": "string" - } - } - }, - 
"github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Interface": { - "type": "object", - "properties": { - "fifo": { - "type": "boolean" - }, - "http": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTP" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.JetStream": { - "type": "object", - "properties": { - "auth": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.NATSAuth" - }, - "name": { - "type": "string", - "title": "+kubebuilder:default=default" - }, - "natsUrl": { - "type": "string" - }, - "subject": { - "type": "string" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.JetStreamSink": { - "type": "object", - "properties": { - "jetstream": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.JetStream" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.JetStreamSource": { - "type": "object", - "properties": { - "jetstream": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.JetStream" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Kafka": { - "type": "object", - "properties": { - "kafkaConfig": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.KafkaConfig" - }, - "name": { - "type": "string", - "title": "+kubebuilder:default=default" - }, - "topic": { - "type": "string" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.KafkaConfig": { - "type": "object", - "properties": { - "brokers": { - "type": "array", - "items": { - "type": "string" - } - }, - "maxMessageBytes": { - "type": "integer" - }, - "net": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.KafkaNET" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.KafkaNET": { - "type": "object", - "properties": { - "sasl": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.SASL" - }, - "tls": { - "$ref": 
"#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.TLS" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.KafkaSink": { - "type": "object", - "properties": { - "acks": { - "title": "+kubebuilder:default=\"all\"", - "$ref": "#/definitions/io.k8s.apimachinery.pkg.util.intstr.IntOrString" - }, - "async": { - "type": "boolean" - }, - "batchSize": { - "title": "+kubebuilder:default=\"100Ki\"", - "$ref": "#/definitions/io.k8s.apimachinery.pkg.api.resource.Quantity" - }, - "compressionType": { - "type": "string", - "title": "+kubebuilder:default=\"lz4\"" - }, - "enableIdempotence": { - "type": "boolean", - "title": "+kubebuilder:default=true" - }, - "kafka": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Kafka" - }, - "linger": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration" - }, - "maxInflight": { - "type": "integer", - "title": "The maximum number of messages to be in-flight when async.\n+kubebuilder:default=20" - }, - "messageTimeout": { - "title": "+kubebuilder:default=\"30s\"", - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.KafkaSource": { - "type": "object", - "properties": { - "fetchMin": { - "title": "+kubebuilder:default=\"100Ki\"", - "$ref": "#/definitions/io.k8s.apimachinery.pkg.api.resource.Quantity" - }, - "fetchWaitMax": { - "title": "+kubebuilder:default=\"500ms\"", - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration" - }, - "groupId": { - "description": "GroupID is the consumer group ID. 
If not specified, a unique deterministic group ID is generated.", - "type": "string" - }, - "kafka": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Kafka" - }, - "startOffset": { - "type": "string", - "title": "+kubebuilder:default=Last" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Log": { - "type": "object", - "properties": { - "truncate": { - "type": "string", - "format": "uint64" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Map": { - "type": "object", - "properties": { - "abstractStep": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractStep" - }, - "expression": { - "type": "string" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Metadata": { - "type": "object", - "properties": { - "annotations": { - "type": "object", - "additionalProperties": { - "type": "string" - } - }, - "labels": { - "type": "object", - "additionalProperties": { - "type": "string" - } - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.NATSAuth": { - "type": "object", - "properties": { - "token": { - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Pipeline": { - "type": "object", - "title": "+kubebuilder:object:root=true\n+kubebuilder:resource:shortName=pl\n+kubebuilder:subresource:status\n+kubebuilder:printcolumn:name=\"Phase\",type=string,JSONPath=`.status.phase`\n+kubebuilder:printcolumn:name=\"Message\",type=string,JSONPath=`.status.message`", - "properties": { - "metadata": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta" - }, - "spec": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.PipelineSpec" - }, - "status": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.PipelineStatus" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.PipelineList": { - "type": 
"object", - "properties": { - "items": { - "type": "array", - "items": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Pipeline" - } - }, - "metadata": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.ListMeta" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.PipelineSpec": { - "type": "object", - "properties": { - "deletionDelay": { - "title": "+kubebuilder:default=\"72h\"", - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration" - }, - "steps": { - "type": "array", - "title": "+patchStrategy=merge\n+patchMergeKey=name", - "items": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.StepSpec" - } - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.PipelineStatus": { - "type": "object", - "properties": { - "conditions": { - "type": "array", - "items": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Condition" - } - }, - "lastUpdated": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Time" - }, - "message": { - "type": "string" - }, - "phase": { - "type": "string" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.S3": { - "type": "object", - "properties": { - "bucket": { - "type": "string" - }, - "credentials": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AWSCredentials" - }, - "endpoint": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AWSEndpoint" - }, - "name": { - "type": "string", - "title": "+kubebuilder:default=default" - }, - "region": { - "type": "string" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.S3Sink": { - "type": "object", - "properties": { - "s3": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.S3" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.S3Source": { - "type": "object", - "properties": { - "concurrency": { - "type": "integer", - 
"title": "+kubebuilder:default=1" - }, - "pollPeriod": { - "title": "+kubebuilder:default=\"1m\"", - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration" - }, - "s3": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.S3" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.SASL": { - "type": "object", - "properties": { - "mechanism": { - "type": "string", - "title": "SASLMechanism is the name of the enabled SASL mechanism.\nPossible values: OAUTHBEARER, PLAIN (defaults to PLAIN).\n+optional" - }, - "password": { - "title": "Password for SASL/PLAIN authentication", - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - }, - "user": { - "title": "User is the authentication identity (authcid) to present for\nSASL/PLAIN or SASL/SCRAM authentication", - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.SQLAction": { - "type": "object", - "properties": { - "onError": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.SQLStatement" - }, - "onRecordNotFound": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.SQLStatement" - }, - "statement": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.SQLStatement" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.SQLStatement": { - "type": "object", - "properties": { - "args": { - "type": "array", - "items": { - "type": "string" - } - }, - "sql": { - "type": "string" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.STAN": { - "type": "object", - "properties": { - "auth": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.NATSAuth" - }, - "clusterId": { - "type": "string" - }, - "maxInflight": { - "type": "integer", - "title": "Max inflight messages when subscribing to the stan server, which means how many messages\nbetween commits, 
therefore potential duplicates during disruption\n+kubebuilder:default=20" - }, - "name": { - "type": "string", - "title": "+kubebuilder:default=default" - }, - "natsMonitoringUrl": { - "type": "string" - }, - "natsUrl": { - "type": "string" - }, - "subject": { - "type": "string" - }, - "subjectPrefix": { - "type": "string" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Scale": { - "type": "object", - "properties": { - "desiredReplicas": { - "description": "An expression to determine the number of replicas. Must evaluation to an `int`.", - "type": "string" - }, - "peekDelay": { - "type": "string", - "title": "An expression to determine the delay for peeking. Maybe string or duration, e.g. `\"4m\"`\n+kubebuilder:default=\"defaultPeekDelay\"" - }, - "scalingDelay": { - "type": "string", - "title": "An expression to determine the delay for scaling. Maybe string or duration, e.g. `\"1m\"`\n+kubebuilder:default=\"defaultScalingDelay\"" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Sidecar": { - "type": "object", - "properties": { - "resources": { - "title": "+kubebuilder:default={limits: {\"cpu\": \"500m\", \"memory\": \"256Mi\"}, requests: {\"cpu\": \"100m\", \"memory\": \"64Mi\"}}", - "$ref": "#/definitions/io.k8s.api.core.v1.ResourceRequirements" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Sink": { - "type": "object", - "properties": { - "db": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.DBSink" - }, - "deadLetterQueue": { - "type": "boolean" - }, - "http": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTPSink" - }, - "jetstream": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.JetStreamSink" - }, - "kafka": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.KafkaSink" - }, - "log": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Log" - }, - 
"name": { - "type": "string", - "title": "+kubebuilder:default=default" - }, - "s3": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.S3Sink" - }, - "stan": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.STAN" - }, - "volume": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.VolumeSink" + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/grpc.gateway.runtime.Error" + } + } } } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Source": { + } + }, + "definitions": { + "eventsource.CreateEventSourceRequest": { "type": "object", "properties": { - "cron": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Cron" - }, - "db": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.DBSource" - }, - "http": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.HTTPSource" - }, - "jetstream": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.JetStreamSource" - }, - "kafka": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.KafkaSource" - }, - "name": { - "type": "string", - "title": "+kubebuilder:default=default" - }, - "retry": { - "title": "+kubebuilder:default={duration: \"100ms\", steps: 20, factorPercentage: 200, jitterPercentage: 10}", - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Backoff" - }, - "s3": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.S3Source" - }, - "stan": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.STAN" + "eventSource": { + "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSource" }, - "volume": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.VolumeSource" + "namespace": { + "type": "string" } } }, - 
"github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Step": { + "eventsource.EventSourceDeletedResponse": { + "type": "object" + }, + "eventsource.EventSourceWatchEvent": { "type": "object", - "title": "+kubebuilder:object:root=true\n+kubebuilder:subresource:status\n+kubebuilder:subresource:scale:specpath=.spec.replicas,statuspath=.status.replicas,selectorpath=.status.selector\n+kubebuilder:printcolumn:name=\"Phase\",type=string,JSONPath=`.status.phase`\n+kubebuilder:printcolumn:name=\"Reason\",type=string,JSONPath=`.status.reason`\n+kubebuilder:printcolumn:name=\"Message\",type=string,JSONPath=`.status.message`\n+kubebuilder:printcolumn:name=\"Desired\",type=string,JSONPath=`.spec.replicas`\n+kubebuilder:printcolumn:name=\"Current\",type=string,JSONPath=`.status.replicas`", "properties": { - "metadata": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta" - }, - "spec": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.StepSpec" + "object": { + "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSource" }, - "status": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.StepStatus" + "type": { + "type": "string" } } }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.StepSpec": { + "eventsource.LogEntry": { "type": "object", + "title": "structured log entry", "properties": { - "affinity": { - "$ref": "#/definitions/io.k8s.api.core.v1.Affinity" - }, - "cat": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Cat" - }, - "code": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Code" - }, - "container": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Container" - }, - "dedupe": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Dedupe" - }, - "expand": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Expand" - }, - "filter": { - "$ref": 
"#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Filter" - }, - "flatten": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Flatten" - }, - "git": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Git" - }, - "group": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Group" - }, - "imagePullSecrets": { - "type": "array", - "title": "ImagePullSecrets is a list of references to secrets in the same namespace to use for pulling any images\nin pods that reference this ServiceAccount. ImagePullSecrets are distinct from Secrets because Secrets\ncan be mounted in the pod, but ImagePullSecrets are only accessed by the kubelet.\nMore info: https://kubernetes.io/docs/concepts/containers/images/#specifying-imagepullsecrets-on-a-pod\n+patchStrategy=merge\n+patchMergeKey=name", - "items": { - "$ref": "#/definitions/io.k8s.api.core.v1.LocalObjectReference" - } - }, - "map": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Map" - }, - "metadata": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Metadata" - }, - "name": { - "type": "string", - "title": "+kubebuilder:default=default" - }, - "nodeSelector": { - "type": "object", - "additionalProperties": { - "type": "string" - } - }, - "replicas": { - "type": "integer", - "title": "+kubebuilder:default=1" - }, - "restartPolicy": { + "eventName": { "type": "string", - "title": "+kubebuilder:default=OnFailure" + "title": "optional - the event name (e.g. 
`example`)" }, - "scale": { - "title": "+kubebuilder:default={peekDelay: \"defaultPeekDelay\", scalingDelay: \"defaultScalingDelay\", desiredReplicas: \"\"}", - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Scale" + "eventSourceName": { + "type": "string" }, - "serviceAccountName": { + "eventSourceType": { "type": "string", - "title": "+kubebuilder:default=pipeline" - }, - "sidecar": { - "title": "+kubebuilder:default={resources: {limits: {\"cpu\": \"500m\", \"memory\": \"256Mi\"}, requests: {\"cpu\": \"100m\", \"memory\": \"64Mi\"}}}", - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Sidecar" - }, - "sinks": { - "type": "array", - "title": "+patchStrategy=merge\n+patchMergeKey=name", - "items": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Sink" - } - }, - "sources": { - "type": "array", - "title": "+patchStrategy=merge\n+patchMergeKey=name", - "items": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Source" - } - }, - "terminator": { - "type": "boolean" - }, - "tolerations": { - "type": "array", - "items": { - "$ref": "#/definitions/io.k8s.api.core.v1.Toleration" - } - }, - "volumes": { - "type": "array", - "title": "+patchStrategy=merge\n+patchMergeKey=name", - "items": { - "$ref": "#/definitions/io.k8s.api.core.v1.Volume" - } - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.StepStatus": { - "type": "object", - "properties": { - "lastScaledAt": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Time" + "title": "optional - the event source type (e.g. 
`webhook`)" }, - "message": { + "level": { "type": "string" }, - "phase": { + "msg": { "type": "string" }, - "reason": { + "namespace": { "type": "string" }, - "replicas": { - "type": "integer" - }, - "selector": { - "type": "string" + "time": { + "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Time" } } }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Storage": { + "eventsource.UpdateEventSourceRequest": { "type": "object", "properties": { + "eventSource": { + "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSource" + }, "name": { "type": "string" }, - "subPath": { - "type": "string", - "title": "volume name" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.TLS": { - "type": "object", - "properties": { - "caCertSecret": { - "title": "CACertSecret refers to the secret that contains the CA cert", - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - }, - "certSecret": { - "title": "CertSecret refers to the secret that contains the cert", - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - }, - "keySecret": { - "title": "KeySecret refers to the secret that contains the key", - "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.VolumeSink": { - "type": "object", - "properties": { - "abstractVolumeSource": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractVolumeSource" - } - } - }, - "github.com.argoproj_labs.argo_dataflow.api.v1alpha1.VolumeSource": { - "type": "object", - "properties": { - "abstractVolumeSource": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.AbstractVolumeSource" - }, - "concurrency": { - "type": "integer", - "title": "+kubebuilder:default=1" - }, - "pollPeriod": { - "title": "+kubebuilder:default=\"1m\"", - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Duration" - }, - "readOnly": { - "type": "boolean" + "namespace": { + 
"type": "string" } } }, @@ -8841,6 +7245,30 @@ } } }, + "io.argoproj.workflow.v1alpha1.ArtGCStatus": { + "description": "ArtGCStatus maintains state related to ArtifactGC", + "type": "object", + "properties": { + "notSpecified": { + "description": "if this is true, we already checked to see if we need to do it and we don't", + "type": "boolean" + }, + "podsRecouped": { + "description": "have completed Pods been processed? (mapped by Pod name) used to prevent re-processing the Status of a Pod more than once", + "type": "object", + "additionalProperties": { + "type": "boolean" + } + }, + "strategiesProcessed": { + "description": "have Pods been started to perform this strategy? (enables us not to re-process what we've already done)", + "type": "object", + "additionalProperties": { + "type": "boolean" + } + } + } + }, "io.argoproj.workflow.v1alpha1.Artifact": { "description": "Artifact indicates an artifact to place at a specified path", "type": "object", @@ -8856,10 +7284,22 @@ "description": "ArchiveLogs indicates if the container logs should be archived", "type": "boolean" }, + "artifactGC": { + "description": "ArtifactGC describes the strategy to use when to deleting an artifact from completed or deleted workflows", + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactGC" + }, "artifactory": { "description": "Artifactory contains artifactory artifact location details", "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactoryArtifact" }, + "azure": { + "description": "Azure contains Azure Storage artifact location details", + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.AzureArtifact" + }, + "deleted": { + "description": "Has this been deleted?", + "type": "boolean" + }, "from": { "description": "From allows an artifact to reference an artifact from a previous step", "type": "string" @@ -8926,6 +7366,50 @@ } } }, + "io.argoproj.workflow.v1alpha1.ArtifactGC": { + "description": "ArtifactGC describes how to delete artifacts from completed 
Workflows", + "type": "object", + "properties": { + "podMetadata": { + "description": "PodMetadata is an optional field for specifying the Labels and Annotations that should be assigned to the Pod doing the deletion", + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Metadata" + }, + "serviceAccountName": { + "description": "ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion", + "type": "string" + }, + "strategy": { + "description": "Strategy is the strategy to use.", + "type": "string" + } + } + }, + "io.argoproj.workflow.v1alpha1.ArtifactGCSpec": { + "description": "ArtifactGCSpec specifies the Artifacts that need to be deleted", + "type": "object", + "properties": { + "artifactsByNode": { + "description": "ArtifactsByNode maps Node name to information pertaining to Artifacts on that Node", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactNodeSpec" + } + } + } + }, + "io.argoproj.workflow.v1alpha1.ArtifactGCStatus": { + "description": "ArtifactGCStatus describes the result of the deletion", + "type": "object", + "properties": { + "artifactResultsByNode": { + "description": "ArtifactResultsByNode maps Node name to result", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactResultNodeStatus" + } + } + } + }, "io.argoproj.workflow.v1alpha1.ArtifactLocation": { "description": "ArtifactLocation describes a location for a single or multiple artifacts. It is used as single artifact in the context of inputs/outputs (e.g. outputs.artifacts.artname). 
It is also used to describe the location of multiple artifacts such as the archive location of a single workflow step, which the executor will use as a default location to store its files.", "type": "object", @@ -8938,6 +7422,10 @@ "description": "Artifactory contains artifactory artifact location details", "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactoryArtifact" }, + "azure": { + "description": "Azure contains Azure Storage artifact location details", + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.AzureArtifact" + }, "gcs": { "description": "GCS contains GCS artifact location details", "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.GCSArtifact" @@ -8968,6 +7456,23 @@ } } }, + "io.argoproj.workflow.v1alpha1.ArtifactNodeSpec": { + "description": "ArtifactNodeSpec specifies the Artifacts that need to be deleted for a given Node", + "type": "object", + "properties": { + "archiveLocation": { + "description": "ArchiveLocation is the template-level Artifact location specification", + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactLocation" + }, + "artifacts": { + "description": "Artifacts maps artifact name to Artifact description", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Artifact" + } + } + } + }, "io.argoproj.workflow.v1alpha1.ArtifactPaths": { "description": "ArtifactPaths expands a step from a collection of artifacts", "type": "object", @@ -8983,10 +7488,22 @@ "description": "ArchiveLogs indicates if the container logs should be archived", "type": "boolean" }, + "artifactGC": { + "description": "ArtifactGC describes the strategy to use when deleting an artifact from completed or deleted workflows", + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactGC" + }, "artifactory": { "description": "Artifactory contains artifactory artifact location details", "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactoryArtifact" }, + "azure": { + 
"description": "Azure contains Azure Storage artifact location details", + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.AzureArtifact" + }, + "deleted": { + "description": "Has this been deleted?", + "type": "boolean" + }, "from": { "description": "From allows an artifact to reference an artifact from a previous step", "type": "string" @@ -9065,6 +7582,10 @@ "description": "Artifactory stores artifacts to JFrog Artifactory", "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactoryArtifactRepository" }, + "azure": { + "description": "Azure stores artifact in an Azure Storage account", + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.AzureArtifactRepository" + }, "gcs": { "description": "GCS stores artifact in a GCS object store", "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.GCSArtifactRepository" @@ -9121,6 +7642,40 @@ } } }, + "io.argoproj.workflow.v1alpha1.ArtifactResult": { + "description": "ArtifactResult describes the result of attempting to delete a given Artifact", + "type": "object", + "required": [ + "name" + ], + "properties": { + "error": { + "description": "Error is an optional error message which should be set if Success==false", + "type": "string" + }, + "name": { + "description": "Name is the name of the Artifact", + "type": "string" + }, + "success": { + "description": "Success describes whether the deletion succeeded", + "type": "boolean" + } + } + }, + "io.argoproj.workflow.v1alpha1.ArtifactResultNodeStatus": { + "description": "ArtifactResultNodeStatus describes the result of the deletion on a given node", + "type": "object", + "properties": { + "artifactResults": { + "description": "ArtifactResults maps Artifact name to result of the deletion", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactResult" + } + } + } + }, "io.argoproj.workflow.v1alpha1.ArtifactoryArtifact": { "description": "ArtifactoryArtifact is the location of an artifactory artifact", 
"type": "object", @@ -9160,6 +7715,67 @@ } } }, + "io.argoproj.workflow.v1alpha1.AzureArtifact": { + "description": "AzureArtifact is the location of a an Azure Storage artifact", + "type": "object", + "required": [ + "endpoint", + "container", + "blob" + ], + "properties": { + "accountKeySecret": { + "description": "AccountKeySecret is the secret selector to the Azure Blob Storage account access key", + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" + }, + "blob": { + "description": "Blob is the blob name (i.e., path) in the container where the artifact resides", + "type": "string" + }, + "container": { + "description": "Container is the container where resources will be stored", + "type": "string" + }, + "endpoint": { + "description": "Endpoint is the service url associated with an account. It is most likely \"https://\u003cACCOUNT_NAME\u003e.blob.core.windows.net\"", + "type": "string" + }, + "useSDKCreds": { + "description": "UseSDKCreds tells the driver to figure out credentials based on sdk defaults.", + "type": "boolean" + } + } + }, + "io.argoproj.workflow.v1alpha1.AzureArtifactRepository": { + "description": "AzureArtifactRepository defines the controller configuration for an Azure Blob Storage artifact repository", + "type": "object", + "required": [ + "endpoint", + "container" + ], + "properties": { + "accountKeySecret": { + "description": "AccountKeySecret is the secret selector to the Azure Blob Storage account access key", + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" + }, + "blobNameFormat": { + "description": "BlobNameFormat is defines the format of how to store blob names. Can reference workflow variables", + "type": "string" + }, + "container": { + "description": "Container is the container where resources will be stored", + "type": "string" + }, + "endpoint": { + "description": "Endpoint is the service url associated with an account. 
It is most likely \"https://\u003cACCOUNT_NAME\u003e.blob.core.windows.net\"", + "type": "string" + }, + "useSDKCreds": { + "description": "UseSDKCreds tells the driver to figure out credentials based on sdk defaults.", + "type": "boolean" + } + } + }, "io.argoproj.workflow.v1alpha1.Backoff": { "description": "Backoff is a backoff strategy to use within retryStrategy", "type": "object", @@ -9178,6 +7794,20 @@ } } }, + "io.argoproj.workflow.v1alpha1.BasicAuth": { + "description": "BasicAuth describes the secret selectors required for basic authentication", + "type": "object", + "properties": { + "passwordSecret": { + "description": "PasswordSecret is the secret selector to the repository password", + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" + }, + "usernameSecret": { + "description": "UsernameSecret is the secret selector to the repository username", + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" + } + } + }, "io.argoproj.workflow.v1alpha1.Cache": { "description": "Cache is the configuration for the type of cache to be used", "type": "object", @@ -9191,6 +7821,18 @@ } } }, + "io.argoproj.workflow.v1alpha1.ClientCertAuth": { + "description": "ClientCertAuth holds necessary information for client authentication via certificates", + "type": "object", + "properties": { + "clientCertSecret": { + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" + }, + "clientKeySecret": { + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" + } + } + }, "io.argoproj.workflow.v1alpha1.ClusterWorkflowTemplate": { "description": "ClusterWorkflowTemplate is the definition of a workflow template resource in cluster scope", "type": "object", @@ -9279,6 +7921,17 @@ } } }, + "io.argoproj.workflow.v1alpha1.CollectEventRequest": { + "type": "object", + "properties": { + "name": { + "type": "string" + } + } + }, + "io.argoproj.workflow.v1alpha1.CollectEventResponse": { + "type": "object" + }, "io.argoproj.workflow.v1alpha1.Condition": { 
"type": "object", "properties": { @@ -9303,14 +7956,14 @@ ], "properties": { "args": { - "description": "Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + "description": "Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", "type": "array", "items": { "type": "string" } }, "command": { - "description": "Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". 
Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + "description": "Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", "type": "array", "items": { "type": "string" @@ -9339,7 +7992,7 @@ } }, "image": { - "description": "Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", + "description": "Container image name. 
More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", "type": "string" }, "imagePullPolicy": { @@ -9909,6 +8562,9 @@ "serviceAccountName": { "type": "string" }, + "serviceAccountNamespace": { + "type": "string" + }, "subject": { "type": "string" } @@ -9921,6 +8577,10 @@ "repo" ], "properties": { + "branch": { + "description": "Branch is the branch to fetch when `SingleBranch` is enabled", + "type": "string" + }, "depth": { "description": "Depth specifies clones/fetches should be shallow and include the given number of commits from the branch tip", "type": "integer" @@ -9952,6 +8612,10 @@ "description": "Revision is the git commit, tag, branch to checkout", "type": "string" }, + "singleBranch": { + "description": "SingleBranch enables single branch clone, using the `branch` parameter", + "type": "boolean" + }, "sshPrivateKeySecret": { "description": "SSHPrivateKeySecret is the secret selector to the repository ssh private key", "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" @@ -10073,6 +8737,10 @@ "description": "Body is content of the HTTP Request", "type": "string" }, + "bodyFrom": { + "description": "BodyFrom is content of the HTTP Request as Bytes", + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.HTTPBodySource" + }, "headers": { "description": "Headers are an optional list of headers to send with HTTP requests", "type": "array", @@ -10081,7 +8749,7 @@ } }, "insecureSkipVerify": { - "description": "insecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client", + "description": "InsecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client", "type": "boolean" }, "method": { @@ -10103,12 +8771,16 @@ } }, "io.argoproj.workflow.v1alpha1.HTTPArtifact": { - "description": "HTTPArtifact allows an file served 
on HTTP to be placed as an input artifact in a container", + "description": "HTTPArtifact allows a file served on HTTP to be placed as an input artifact in a container", "type": "object", "required": [ "url" ], "properties": { + "auth": { + "description": "Auth contains information for client authentication", + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.HTTPAuth" + }, "headers": { "description": "Headers are an optional list of headers to send with HTTP requests for artifacts", "type": "array", @@ -10122,6 +8794,30 @@ } } }, + "io.argoproj.workflow.v1alpha1.HTTPAuth": { + "type": "object", + "properties": { + "basicAuth": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.BasicAuth" + }, + "clientCert": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ClientCertAuth" + }, + "oauth2": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.OAuth2Auth" + } + } + }, + "io.argoproj.workflow.v1alpha1.HTTPBodySource": { + "description": "HTTPBodySource contains the source of the HTTP body.", + "type": "object", + "properties": { + "bytes": { + "type": "string", + "format": "byte" + } + } + }, "io.argoproj.workflow.v1alpha1.HTTPHeader": { "type": "object", "required": [ @@ -10274,9 +8970,6 @@ }, "io.argoproj.workflow.v1alpha1.LifecycleHook": { "type": "object", - "required": [ - "template" - ], "properties": { "arguments": { "description": "Arguments hold arguments to the template", @@ -10343,6 +9036,18 @@ } } }, + "io.argoproj.workflow.v1alpha1.ManifestFrom": { + "type": "object", + "required": [ + "artifact" + ], + "properties": { + "artifact": { + "description": "Artifact contains the artifact to use", + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Artifact" + } + } + }, "io.argoproj.workflow.v1alpha1.MemoizationStatus": { "description": "MemoizationStatus is the status of this memoized node", "type": "object", @@ -10633,6 +9338,50 @@ "description": "NoneStrategy indicates to skip tar process and upload the files or directory tree as 
independent files. Note that if the artifact is a directory, the artifact driver must support the ability to save/load the directory appropriately.", "type": "object" }, + "io.argoproj.workflow.v1alpha1.OAuth2Auth": { + "description": "OAuth2Auth holds all information for client authentication via OAuth2 tokens", + "type": "object", + "properties": { + "clientIDSecret": { + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" + }, + "clientSecretSecret": { + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" + }, + "endpointParams": { + "type": "array", + "items": { + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.OAuth2EndpointParam" + } + }, + "scopes": { + "type": "array", + "items": { + "type": "string" + } + }, + "tokenURLSecret": { + "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" + } + } + }, + "io.argoproj.workflow.v1alpha1.OAuth2EndpointParam": { + "description": "EndpointParam is for requesting optional fields that should be sent in the oauth request", + "type": "object", + "required": [ + "key" + ], + "properties": { + "key": { + "description": "Name is the header name", + "type": "string" + }, + "value": { + "description": "Value is the literal value to use for the header", + "type": "string" + } + } + }, "io.argoproj.workflow.v1alpha1.OSSArtifact": { "description": "OSSArtifact is the location of an Alibaba Cloud OSS artifact", "type": "object", @@ -10902,6 +9651,10 @@ "description": "Manifest contains the kubernetes manifest", "type": "string" }, + "manifestFrom": { + "description": "ManifestFrom is the source for a single kubernetes manifest", + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ManifestFrom" + }, "mergeStrategy": { "description": "MergeStrategy is the strategy used to merge a patch. 
It defaults to \"strategic\" Must be one of: strategic, merge, json", "type": "string" @@ -10928,6 +9681,12 @@ "namespace": { "type": "string" }, + "parameters": { + "type": "array", + "items": { + "type": "string" + } + }, "uid": { "type": "string" } @@ -10954,6 +9713,12 @@ "nodeFieldSelector": { "type": "string" }, + "parameters": { + "type": "array", + "items": { + "type": "string" + } + }, "restartSuccessful": { "type": "boolean" }, @@ -10983,7 +9748,7 @@ "type": "string" }, "limit": { - "description": "Limit is the maximum number of attempts when retrying a container", + "description": "Limit is the maximum number of retry attempts when retrying a container. It does not include the original container; the maximum number of total attempts will be `limit + 1`.", "$ref": "#/definitions/io.k8s.apimachinery.pkg.util.intstr.IntOrString" }, "retryPolicy": { @@ -11125,14 +9890,14 @@ ], "properties": { "args": { - "description": "Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + "description": "Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. 
\"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", "type": "array", "items": { "type": "string" } }, "command": { - "description": "Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + "description": "Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", "type": "array", "items": { "type": "string" @@ -11155,7 +9920,7 @@ } }, "image": { - "description": "Docker image name. 
More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", + "description": "Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", "type": "string" }, "imagePullPolicy": { @@ -11713,14 +10478,14 @@ ], "properties": { "args": { - "description": "Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + "description": "Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", "type": "array", "items": { "type": "string" } }, "command": { - "description": "Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + "description": "Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", "type": "array", "items": { "type": "string" @@ -11743,7 +10508,7 @@ } }, "image": { - "description": "Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", + "description": "Container image name. 
More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", "type": "string" }, "imagePullPolicy": { @@ -12121,6 +10886,12 @@ }, "namespace": { "type": "string" + }, + "parameters": { + "type": "array", + "items": { + "type": "string" + } } } }, @@ -12150,6 +10921,12 @@ "nodeFieldSelector": { "type": "string" }, + "parameters": { + "type": "array", + "items": { + "type": "string" + } + }, "restartSuccessful": { "type": "boolean" } @@ -12198,6 +10975,10 @@ "description": "Arguments contain the parameters and artifacts sent to the workflow entrypoint Parameters are referencable globally using the 'workflow' variable prefix. e.g. {{io.argoproj.workflow.v1alpha1.parameters.myparam}}", "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Arguments" }, + "artifactGC": { + "description": "ArtifactGC describes the strategy to use when deleting artifacts from completed or deleted workflows (applies to all output Artifacts unless Artifact.ArtifactGC is specified, which overrides this)", + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactGC" + }, "artifactRepositoryRef": { "description": "ArtifactRepositoryRef specifies the configMap name and key containing the artifact repository config.", "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactRepositoryRef" @@ -12274,7 +11055,7 @@ "$ref": "#/definitions/io.k8s.api.policy.v1beta1.PodDisruptionBudgetSpec" }, "podGC": { - "description": "PodGC describes the strategy to use when to deleting completed pods", + "description": "PodGC describes the strategy to use when deleting completed pods", "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.PodGC" }, "podMetadata": { @@ -12282,7 +11063,7 @@ "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.Metadata" }, "podPriority": { - "description": "Priority to apply to workflow pods.", + 
"description": "Priority to apply to workflow pods. DEPRECATED: Use PodPriorityClassName instead.", "type": "integer" }, "podPriorityClassName": { @@ -12352,7 +11133,7 @@ "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.TTLStrategy" }, "volumeClaimGC": { - "description": "VolumeClaimGC describes the strategy to use when to deleting volumes from completed workflows", + "description": "VolumeClaimGC describes the strategy to use when deleting volumes from completed workflows", "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.VolumeClaimGC" }, "volumeClaimTemplates": { @@ -12374,7 +11155,7 @@ "x-kubernetes-patch-strategy": "merge" }, "workflowMetadata": { - "description": "WorkflowMetadata contains some metadata of the workflow to be refer", + "description": "WorkflowMetadata contains some metadata of the workflow to refer to", "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.WorkflowMetadata" }, "workflowTemplateRef": { @@ -12387,6 +11168,10 @@ "description": "WorkflowStatus contains overall status information about a workflow", "type": "object", "properties": { + "artifactGCStatus": { + "description": "ArtifactGCStatus maintains the status of Artifact Garbage Collection", + "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtGCStatus" + }, "artifactRepositoryRef": { "description": "ArtifactRepositoryRef is used to cache the repository to use so we do not need to determine it everytime we reconcile.", "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ArtifactRepositoryRefStatus" @@ -15308,43 +14093,6 @@ "description": "Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors.\n\nThe serialization format is:\n\n\u003cquantity\u003e ::= \u003csignedNumber\u003e\u003csuffix\u003e\n (Note that \u003csuffix\u003e may be empty, from the \"\" case in \u003cdecimalSI\u003e.)\n\u003cdigit\u003e ::= 0 | 1 | ... 
| 9 \u003cdigits\u003e ::= \u003cdigit\u003e | \u003cdigit\u003e\u003cdigits\u003e \u003cnumber\u003e ::= \u003cdigits\u003e | \u003cdigits\u003e.\u003cdigits\u003e | \u003cdigits\u003e. | .\u003cdigits\u003e \u003csign\u003e ::= \"+\" | \"-\" \u003csignedNumber\u003e ::= \u003cnumber\u003e | \u003csign\u003e\u003cnumber\u003e \u003csuffix\u003e ::= \u003cbinarySI\u003e | \u003cdecimalExponent\u003e | \u003cdecimalSI\u003e \u003cbinarySI\u003e ::= Ki | Mi | Gi | Ti | Pi | Ei\n (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html)\n\u003cdecimalSI\u003e ::= m | \"\" | k | M | G | T | P | E\n (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.)\n\u003cdecimalExponent\u003e ::= \"e\" \u003csignedNumber\u003e | \"E\" \u003csignedNumber\u003e\n\nNo matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities.\n\nWhen a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized.\n\nBefore serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that:\n a. No precision is lost\n b. No fractional digits will be emitted\n c. The exponent (or suffix) is as large as possible.\nThe sign will be omitted unless the number is negative.\n\nExamples:\n 1.5 will be serialized as \"1500m\"\n 1.5Gi will be serialized as \"1536Mi\"\n\nNote that the quantity will NEVER be internally represented by a floating point number. 
That is the whole point of this exercise.\n\nNon-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.)\n\nThis format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation.", "type": "string" }, - "io.k8s.apimachinery.pkg.apis.meta.v1.Condition": { - "description": "Condition contains details for one aspect of the current state of this API Resource.", - "type": "object", - "required": [ - "type", - "status", - "lastTransitionTime", - "reason", - "message" - ], - "properties": { - "lastTransitionTime": { - "description": "lastTransitionTime is the last time the condition transitioned from one status to another. This should be when the underlying condition changed. If that is not known, then using the time when the API field changed is acceptable.", - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Time" - }, - "message": { - "description": "message is a human readable message indicating details about the transition. This may be an empty string.", - "type": "string" - }, - "observedGeneration": { - "description": "observedGeneration represents the .metadata.generation that the condition was set based upon. For instance, if .metadata.generation is currently 12, but the .status.conditions[x].observedGeneration is 9, the condition is out of date with respect to the current state of the instance.", - "type": "integer" - }, - "reason": { - "description": "reason contains a programmatic identifier indicating the reason for the condition's last transition. Producers of specific condition types may define expected values and meanings for this field, and whether the values are considered a guaranteed API. The value should be a CamelCase string. 
This field may not be empty.", - "type": "string" - }, - "status": { - "description": "status of the condition, one of True, False, Unknown.", - "type": "string" - }, - "type": { - "description": "type of condition in CamelCase or in foo.example.com/CamelCase.", - "type": "string" - } - } - }, "io.k8s.apimachinery.pkg.apis.meta.v1.CreateOptions": { "description": "CreateOptions may be provided when creating an API object.", "type": "object", @@ -15362,16 +14110,7 @@ }, "fieldValidation": { "type": "string", - "title": "fieldValidation determines how the server should respond to\nunknown/duplicate fields in the object in the request.\nIntroduced as alpha in 1.23, older servers or servers with the\n`ServerSideFieldValidation` feature disabled will discard valid values\nspecified in this param and not perform any server side field validation.\nValid values are:\n- Ignore: ignores unknown/duplicate fields.\n- Warn: responds with a warning for each\nunknown/duplicate field, but successfully serves the request.\n- Strict: fails the request on unknown/duplicate fields.\n+optional" - } - } - }, - "io.k8s.apimachinery.pkg.apis.meta.v1.Duration": { - "description": "Duration is a wrapper around time.Duration which supports correct\nmarshaling to YAML and JSON. In particular, it marshals into strings, which\ncan be used as map keys in json.", - "type": "object", - "properties": { - "duration": { - "type": "string" + "title": "fieldValidation instructs the server on how to handle\nobjects in the request (POST/PUT/PATCH) containing unknown\nor duplicate fields, provided that the `ServerSideFieldValidation`\nfeature gate is also enabled. Valid values are:\n- Ignore: This will ignore any unknown fields that are silently\ndropped from the object, and will ignore all but the last duplicate\nfield that the decoder encounters. 
This is the default behavior\nprior to v1.23 and is the default behavior when the\n`ServerSideFieldValidation` feature gate is disabled.\n- Warn: This will send a warning via the standard warning response\nheader for each unknown field that is dropped from the object, and\nfor each duplicate field that is encountered. The request will\nstill succeed if there are no other errors, and will only persist\nthe last of any duplicate fields. This is the default when the\n`ServerSideFieldValidation` feature gate is enabled.\n- Strict: This will fail the request with a BadRequest error if\nany unknown fields would be dropped from the object, or if any\nduplicate fields are present. The error returned from the server\nwill contain all unknown and duplicate fields encountered.\n+optional" } } }, @@ -15655,55 +14394,6 @@ "io.k8s.apimachinery.pkg.util.intstr.IntOrString": { "type": "string" }, - "pipeline.DeletePipelineResponse": { - "type": "object" - }, - "pipeline.LogEntry": { - "type": "object", - "title": "structured log entry", - "properties": { - "msg": { - "type": "string" - }, - "namespace": { - "type": "string" - }, - "pipelineName": { - "type": "string" - }, - "stepName": { - "type": "string" - }, - "time": { - "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Time" - } - } - }, - "pipeline.PipelineWatchEvent": { - "type": "object", - "properties": { - "object": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Pipeline" - }, - "type": { - "type": "string" - } - } - }, - "pipeline.RestartPipelineResponse": { - "type": "object" - }, - "pipeline.StepWatchEvent": { - "type": "object", - "properties": { - "object": { - "$ref": "#/definitions/github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Step" - }, - "type": { - "type": "string" - } - } - }, "sensor.CreateSensorRequest": { "type": "object", "properties": { @@ -15784,12 +14474,13 @@ "BearerToken": { "description": "Bearer Token authentication", "type": "apiKey", - "name": 
"authorization", + "name": "Authorization", "in": "header" - }, - "HTTPBasic": { - "description": "HTTP Basic authentication", - "type": "basic" } - } + }, + "security": [ + { + "BearerToken": [] + } + ] } \ No newline at end of file diff --git a/cmd/argo/commands/archive/resubmit.go b/cmd/argo/commands/archive/resubmit.go index dae6d6d92641..94408a1a48b2 100644 --- a/cmd/argo/commands/archive/resubmit.go +++ b/cmd/argo/commands/archive/resubmit.go @@ -45,27 +45,27 @@ func NewResubmitCommand() *cobra.Command { # Resubmit multiple workflows: - argo resubmit uid another-uid + argo archive resubmit uid another-uid # Resubmit multiple workflows by label selector: - argo resubmit -l workflows.argoproj.io/test=true + argo archive resubmit -l workflows.argoproj.io/test=true # Resubmit multiple workflows by field selector: - argo resubmit --field-selector metadata.namespace=argo + argo archive resubmit --field-selector metadata.namespace=argo # Resubmit and wait for completion: - argo resubmit --wait uid + argo archive resubmit --wait uid # Resubmit and watch until completion: - argo resubmit --watch uid + argo archive resubmit --watch uid # Resubmit and tail logs until completion: - argo resubmit --log uid + argo archive resubmit --log uid `, Run: func(cmd *cobra.Command, args []string) { if cmd.Flag("priority").Changed { @@ -82,6 +82,7 @@ func NewResubmitCommand() *cobra.Command { }, } + command.Flags().StringArrayVarP(&cliSubmitOpts.Parameters, "parameter", "p", []string{}, "input parameter to override on the original workflow spec") command.Flags().Int32Var(&resubmitOpts.priority, "priority", 0, "workflow priority") command.Flags().StringVarP(&cliSubmitOpts.Output, "output", "o", "", "Output format. 
One of: name|json|yaml|wide") command.Flags().BoolVarP(&cliSubmitOpts.Wait, "wait", "w", false, "wait for the workflow to complete, only works when a single workflow is resubmitted") @@ -127,10 +128,11 @@ func resubmitArchivedWorkflows(ctx context.Context, archiveServiceClient workflo resubmittedUids[string(wf.UID)] = true lastResubmitted, err = archiveServiceClient.ResubmitArchivedWorkflow(ctx, &workflowarchivepkg.ResubmitArchivedWorkflowRequest{ - Uid: string(wf.UID), - Namespace: wf.Namespace, - Name: wf.Name, - Memoized: resubmitOpts.memoized, + Uid: string(wf.UID), + Namespace: wf.Namespace, + Name: wf.Name, + Memoized: resubmitOpts.memoized, + Parameters: cliSubmitOpts.Parameters, }) if err != nil { return err diff --git a/cmd/argo/commands/archive/retry.go b/cmd/argo/commands/archive/retry.go new file mode 100644 index 000000000000..9a9171f3905a --- /dev/null +++ b/cmd/argo/commands/archive/retry.go @@ -0,0 +1,152 @@ +package archive + +import ( + "context" + "fmt" + "os" + + "github.com/argoproj/pkg/errors" + "github.com/spf13/cobra" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/fields" + "k8s.io/apimachinery/pkg/types" + + client "github.com/argoproj/argo-workflows/v3/cmd/argo/commands/client" + "github.com/argoproj/argo-workflows/v3/cmd/argo/commands/common" + workflowpkg "github.com/argoproj/argo-workflows/v3/pkg/apiclient/workflow" + workflowarchivepkg "github.com/argoproj/argo-workflows/v3/pkg/apiclient/workflowarchive" + wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" +) + +type retryOps struct { + nodeFieldSelector string // --node-field-selector + restartSuccessful bool // --restart-successful + namespace string // --namespace + labelSelector string // --selector + fieldSelector string // --field-selector +} + +// hasSelector returns true if the CLI arguments selects multiple workflows +func (o *retryOps) hasSelector() bool { + if o.labelSelector != "" || o.fieldSelector != "" { + return true + } 
+ return false +} + +func NewRetryCommand() *cobra.Command { + var ( + cliSubmitOpts common.CliSubmitOpts + retryOpts retryOps + ) + command := &cobra.Command{ + Use: "retry [WORKFLOW...]", + Short: "retry zero or more workflows", + Example: `# Retry a workflow: + + argo archive retry uid + +# Retry multiple workflows: + + argo archive retry uid another-uid + +# Retry multiple workflows by label selector: + + argo archive retry -l workflows.argoproj.io/test=true + +# Retry multiple workflows by field selector: + + argo archive retry --field-selector metadata.namespace=argo + +# Retry and wait for completion: + + argo archive retry --wait uid + +# Retry and watch until completion: + + argo archive retry --watch uid + +# Retry and tail logs until completion: + + argo archive retry --log uid +`, + Run: func(cmd *cobra.Command, args []string) { + if len(args) == 0 && !retryOpts.hasSelector() { + cmd.HelpFunc()(cmd, args) + os.Exit(1) + } + + ctx, apiClient := client.NewAPIClient(cmd.Context()) + serviceClient := apiClient.NewWorkflowServiceClient() + archiveServiceClient, err := apiClient.NewArchivedWorkflowServiceClient() + errors.CheckError(err) + retryOpts.namespace = client.Namespace() + + err = retryArchivedWorkflows(ctx, archiveServiceClient, serviceClient, retryOpts, cliSubmitOpts, args) + errors.CheckError(err) + }, + } + + command.Flags().StringArrayVarP(&cliSubmitOpts.Parameters, "parameter", "p", []string{}, "input parameter to override on the original workflow spec") + command.Flags().StringVarP(&cliSubmitOpts.Output, "output", "o", "", "Output format. 
One of: name|json|yaml|wide") + command.Flags().BoolVarP(&cliSubmitOpts.Wait, "wait", "w", false, "wait for the workflow to complete, only works when a single workflow is retried") + command.Flags().BoolVar(&cliSubmitOpts.Watch, "watch", false, "watch the workflow until it completes, only works when a single workflow is retried") + command.Flags().BoolVar(&cliSubmitOpts.Log, "log", false, "log the workflow until it completes") + command.Flags().BoolVar(&retryOpts.restartSuccessful, "restart-successful", false, "indicates to restart successful nodes matching the --node-field-selector") + command.Flags().StringVar(&retryOpts.nodeFieldSelector, "node-field-selector", "", "selector of nodes to reset, eg: --node-field-selector inputs.paramaters.myparam.value=abc") + command.Flags().StringVarP(&retryOpts.labelSelector, "selector", "l", "", "Selector (label query) to filter on, not including uninitialized ones, supports '=', '==', and '!='.(e.g. -l key1=value1,key2=value2)") + command.Flags().StringVar(&retryOpts.fieldSelector, "field-selector", "", "Selector (field query) to filter on, supports '=', '==', and '!='.(e.g. --field-selector key1=value1,key2=value2). 
The server only supports a limited number of field queries per type.") + return command +} + +// retryWorkflows retries workflows by given retryArgs or workflow names +func retryArchivedWorkflows(ctx context.Context, archiveServiceClient workflowarchivepkg.ArchivedWorkflowServiceClient, serviceClient workflowpkg.WorkflowServiceClient, retryOpts retryOps, cliSubmitOpts common.CliSubmitOpts, args []string) error { + selector, err := fields.ParseSelector(retryOpts.nodeFieldSelector) + if err != nil { + return fmt.Errorf("unable to parse node field selector '%s': %s", retryOpts.nodeFieldSelector, err) + } + var wfs wfv1.Workflows + if retryOpts.hasSelector() { + wfs, err = listArchivedWorkflows(ctx, archiveServiceClient, retryOpts.fieldSelector, retryOpts.labelSelector, 0) + if err != nil { + return err + } + } + + for _, uid := range args { + wfs = append(wfs, wfv1.Workflow{ + ObjectMeta: metav1.ObjectMeta{ + UID: types.UID(uid), + Namespace: retryOpts.namespace, + }, + }) + } + + var lastRetried *wfv1.Workflow + retriedUids := make(map[string]bool) + for _, wf := range wfs { + if _, ok := retriedUids[string(wf.UID)]; ok { + // de-duplication in case there is an overlap between the selector and given workflow names + continue + } + retriedUids[string(wf.UID)] = true + + lastRetried, err = archiveServiceClient.RetryArchivedWorkflow(ctx, &workflowarchivepkg.RetryArchivedWorkflowRequest{ + Uid: string(wf.UID), + Namespace: wf.Namespace, + Name: wf.Name, + RestartSuccessful: retryOpts.restartSuccessful, + NodeFieldSelector: selector.String(), + Parameters: cliSubmitOpts.Parameters, + }) + if err != nil { + return err + } + printWorkflow(lastRetried, cliSubmitOpts.Output) + } + if len(retriedUids) == 1 { + // watch or wait when there is only one workflow retried + common.WaitWatchOrLog(ctx, serviceClient, lastRetried.Namespace, []string{lastRetried.Name}, cliSubmitOpts) + } + return nil +} diff --git a/cmd/argo/commands/archive/root.go b/cmd/argo/commands/archive/root.go 
index 06caf67ede5e..bf147daefd65 100644 --- a/cmd/argo/commands/archive/root.go +++ b/cmd/argo/commands/archive/root.go @@ -19,5 +19,6 @@ func NewArchiveCommand() *cobra.Command { command.AddCommand(NewListLabelKeyCommand()) command.AddCommand(NewListLabelValueCommand()) command.AddCommand(NewResubmitCommand()) + command.AddCommand(NewRetryCommand()) return command } diff --git a/cmd/argo/commands/client/conn.go b/cmd/argo/commands/client/conn.go index 5b710ab95594..5f28d1d8268d 100644 --- a/cmd/argo/commands/client/conn.go +++ b/cmd/argo/commands/client/conn.go @@ -16,7 +16,7 @@ import ( ) var ( - argoServerOpts = apiclient.ArgoServerOpts{} + ArgoServerOpts = apiclient.ArgoServerOpts{} instanceID string ) @@ -43,20 +43,20 @@ func GetConfig() clientcmd.ClientConfig { func AddAPIClientFlagsToCmd(cmd *cobra.Command) { cmd.PersistentFlags().StringVar(&instanceID, "instanceid", os.Getenv("ARGO_INSTANCEID"), "submit with a specific controller's instance id label. Default to the ARGO_INSTANCEID environment variable.") // "-s" like kubectl - cmd.PersistentFlags().StringVarP(&argoServerOpts.URL, "argo-server", "s", os.Getenv("ARGO_SERVER"), "API server `host:port`. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable.") - cmd.PersistentFlags().StringVar(&argoServerOpts.Path, "argo-base-href", os.Getenv("ARGO_BASE_HREF"), "An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable.") - cmd.PersistentFlags().BoolVar(&argoServerOpts.HTTP1, "argo-http1", os.Getenv("ARGO_HTTP1") == "true", "If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable.") - cmd.PersistentFlags().StringSliceVarP(&argoServerOpts.Headers, "header", "H", []string{}, "Sets additional header to all requests made by Argo CLI. 
(Can be repeated multiple times to add multiple headers, also supports comma separated headers) Used only when either ARGO_HTTP1 or --argo-http1 is set to true.") + cmd.PersistentFlags().StringVarP(&ArgoServerOpts.URL, "argo-server", "s", os.Getenv("ARGO_SERVER"), "API server `host:port`. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable.") + cmd.PersistentFlags().StringVar(&ArgoServerOpts.Path, "argo-base-href", os.Getenv("ARGO_BASE_HREF"), "An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable.") + cmd.PersistentFlags().BoolVar(&ArgoServerOpts.HTTP1, "argo-http1", os.Getenv("ARGO_HTTP1") == "true", "If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable.") + cmd.PersistentFlags().StringSliceVarP(&ArgoServerOpts.Headers, "header", "H", []string{}, "Sets additional header to all requests made by Argo CLI. (Can be repeated multiple times to add multiple headers, also supports comma separated headers) Used only when either ARGO_HTTP1 or --argo-http1 is set to true.") // "-e" for encrypted - like zip - cmd.PersistentFlags().BoolVarP(&argoServerOpts.Secure, "secure", "e", os.Getenv("ARGO_SECURE") != "false", "Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable.") + cmd.PersistentFlags().BoolVarP(&ArgoServerOpts.Secure, "secure", "e", os.Getenv("ARGO_SECURE") != "false", "Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable.") // "-k" like curl - cmd.PersistentFlags().BoolVarP(&argoServerOpts.InsecureSkipVerify, "insecure-skip-verify", "k", os.Getenv("ARGO_INSECURE_SKIP_VERIFY") == "true", "If true, the Argo Server's certificate will not be checked for validity. This will make your HTTPS connections insecure. 
Defaults to the ARGO_INSECURE_SKIP_VERIFY environment variable.") + cmd.PersistentFlags().BoolVarP(&ArgoServerOpts.InsecureSkipVerify, "insecure-skip-verify", "k", os.Getenv("ARGO_INSECURE_SKIP_VERIFY") == "true", "If true, the Argo Server's certificate will not be checked for validity. This will make your HTTPS connections insecure. Defaults to the ARGO_INSECURE_SKIP_VERIFY environment variable.") } func NewAPIClient(ctx context.Context) (context.Context, apiclient.Client) { ctx, client, err := apiclient.NewClientFromOpts( apiclient.Opts{ - ArgoServerOpts: argoServerOpts, + ArgoServerOpts: ArgoServerOpts, InstanceID: instanceID, AuthSupplier: func() string { return GetAuthString() diff --git a/cmd/argo/commands/common/get.go b/cmd/argo/commands/common/get.go index f548f8918e0d..47405f0014b2 100644 --- a/cmd/argo/commands/common/get.go +++ b/cmd/argo/commands/common/get.go @@ -118,6 +118,8 @@ func PrintWorkflowHelper(wf *wfv1.Workflow, getArgs GetFlags) string { out += fmt.Sprintf(fmtStr, " "+art.Name+":", art.S3.String()) } else if art.Artifactory != nil { out += fmt.Sprintf(fmtStr, " "+art.Name+":", art.Artifactory.String()) + } else if art.Azure != nil { + out += fmt.Sprintf(fmtStr, " "+art.Name+":", art.Azure.String()) } } } @@ -164,7 +166,6 @@ func PrintWorkflowHelper(wf *wfv1.Workflow, getArgs GetFlags) string { } } writerBuffer := new(bytes.Buffer) - printer.PrintSecurityNudges(*wf, writerBuffer) out += writerBuffer.String() return out } diff --git a/cmd/argo/commands/common/get_test.go b/cmd/argo/commands/common/get_test.go index b55c512373c7..42652d5bec98 100644 --- a/cmd/argo/commands/common/get_test.go +++ b/cmd/argo/commands/common/get_test.go @@ -3,6 +3,7 @@ package common import ( "bytes" "fmt" + "hash/fnv" "testing" "text/tabwriter" "time" @@ -15,12 +16,32 @@ import ( "github.com/argoproj/argo-workflows/v3/workflow/util" ) +var ( + workflowName string = "testWF" +) + +func init() { + // these values get used as part of determining node name and would 
normally be set as part of + // running the application + JobStatusIconMap = map[wfv1.NodePhase]string{ + wfv1.NodePending: ansiFormat("Pending", FgYellow), + wfv1.NodeRunning: ansiFormat("Running", FgCyan), + wfv1.NodeSucceeded: ansiFormat("Succeeded", FgGreen), + wfv1.NodeSkipped: ansiFormat("Skipped", FgDefault), + wfv1.NodeFailed: ansiFormat("Failed", FgRed), + wfv1.NodeError: ansiFormat("Error", FgRed), + } + NodeTypeIconMap = map[wfv1.NodeType]string{ + wfv1.NodeTypeSuspend: ansiFormat("Suspend", FgCyan), + } +} + func testPrintNodeImpl(t *testing.T, expected string, node wfv1.NodeStatus, getArgs GetFlags) { var result bytes.Buffer w := tabwriter.NewWriter(&result, 0, 8, 1, '\t', 0) filtered, _ := filterNode(node, getArgs) if !filtered { - printNode(w, node, "testWf", "", getArgs, util.GetPodNameVersion()) + printNode(w, node, workflowName, "", getArgs, util.GetPodNameVersion()) } err := w.Flush() assert.NoError(t, err) @@ -51,19 +72,24 @@ func TestPrintNode(t *testing.T) { FinishedAt: timestamp, Message: nodeMessage, } + node.HostNodeName = kubernetesNodeName - testPrintNodeImpl(t, fmt.Sprintf("%s %s\t%s\t%s\t%s\t%s\t%s\n", JobStatusIconMap[wfv1.NodeRunning], nodeName, "", nodeID, "0s", nodeMessage, ""), node, getArgs) + // derive expected pod name: + h := fnv.New32a() + _, _ = h.Write([]byte(fmt.Sprintf("%s %s", JobStatusIconMap[wfv1.NodeRunning], nodeName))) + expectedPodName := fmt.Sprintf("%s-%s-%v", workflowName, node.TemplateName, h.Sum32()) + testPrintNodeImpl(t, fmt.Sprintf("%s %s\t%s\t%s\t%s\t%s\t%s\n", JobStatusIconMap[wfv1.NodeRunning], nodeName, "", expectedPodName, "0s", nodeMessage, ""), node, getArgs) // Compatibility test getArgs.Status = "Running" - testPrintNodeImpl(t, fmt.Sprintf("%s %s\t\t%s\t%s\t%s\t\n", JobStatusIconMap[wfv1.NodeRunning], nodeName, nodeID, "0s", nodeMessage), node, getArgs) + testPrintNodeImpl(t, fmt.Sprintf("%s %s\t\t%s\t%s\t%s\t\n", JobStatusIconMap[wfv1.NodeRunning], nodeName, expectedPodName, "0s", nodeMessage), 
node, getArgs) getArgs.Status = "" getArgs.NodeFieldSelectorString = "phase=Running" - testPrintNodeImpl(t, fmt.Sprintf("%s %s\t\t%s\t%s\t%s\t\n", JobStatusIconMap[wfv1.NodeRunning], nodeName, nodeID, "0s", nodeMessage), node, getArgs) + testPrintNodeImpl(t, fmt.Sprintf("%s %s\t\t%s\t%s\t%s\t\n", JobStatusIconMap[wfv1.NodeRunning], nodeName, expectedPodName, "0s", nodeMessage), node, getArgs) getArgs.NodeFieldSelectorString = "phase!=foobar" - testPrintNodeImpl(t, fmt.Sprintf("%s %s\t\t%s\t%s\t%s\t\n", JobStatusIconMap[wfv1.NodeRunning], nodeName, nodeID, "0s", nodeMessage), node, getArgs) + testPrintNodeImpl(t, fmt.Sprintf("%s %s\t\t%s\t%s\t%s\t\n", JobStatusIconMap[wfv1.NodeRunning], nodeName, expectedPodName, "0s", nodeMessage), node, getArgs) getArgs.NodeFieldSelectorString = "phase!=Running" testPrintNodeImpl(t, "", node, getArgs) @@ -82,7 +108,8 @@ func TestPrintNode(t *testing.T) { } node.TemplateName = nodeTemplateName - testPrintNodeImpl(t, fmt.Sprintf("%s %s\t%s\t%s\t%s\t%s\t%s\n", JobStatusIconMap[wfv1.NodeRunning], nodeName, nodeTemplateName, nodeID, "0s", nodeMessage, ""), node, getArgs) + expectedPodName = fmt.Sprintf("%s-%s-%v", workflowName, node.TemplateName, h.Sum32()) + testPrintNodeImpl(t, fmt.Sprintf("%s %s\t%s\t%s\t%s\t%s\t%s\n", JobStatusIconMap[wfv1.NodeRunning], nodeName, nodeTemplateName, expectedPodName, "0s", nodeMessage, ""), node, getArgs) node.Type = wfv1.NodeTypeSuspend testPrintNodeImpl(t, fmt.Sprintf("%s %s\t%s\t%s\t%s\t%s\t%s\n", NodeTypeIconMap[wfv1.NodeTypeSuspend], nodeName, nodeTemplateName, "", "", nodeMessage, ""), node, getArgs) @@ -91,16 +118,18 @@ func TestPrintNode(t *testing.T) { Name: nodeTemplateRefName, Template: nodeTemplateRefName, } + templateName := fmt.Sprintf("%s/%s", node.TemplateRef.Name, node.TemplateRef.Template) + expectedPodName = fmt.Sprintf("%s-%s-%v", workflowName, templateName, h.Sum32()) testPrintNodeImpl(t, fmt.Sprintf("%s %s\t%s/%s\t%s\t%s\t%s\t%s\n", NodeTypeIconMap[wfv1.NodeTypeSuspend], 
nodeName, nodeTemplateRefName, nodeTemplateRefName, "", "", nodeMessage, ""), node, getArgs) getArgs.Output = "wide" testPrintNodeImpl(t, fmt.Sprintf("%s %s\t%s/%s\t%s\t%s\t%s\t%s\t%s\t\n", NodeTypeIconMap[wfv1.NodeTypeSuspend], nodeName, nodeTemplateRefName, nodeTemplateRefName, "", "", getArtifactsString(node), nodeMessage, ""), node, getArgs) node.Type = wfv1.NodeTypePod - testPrintNodeImpl(t, fmt.Sprintf("%s %s\t%s/%s\t%s\t%s\t%s\t%s\t%s\t%s\n", JobStatusIconMap[wfv1.NodeRunning], nodeName, nodeTemplateRefName, nodeTemplateRefName, nodeID, "0s", getArtifactsString(node), nodeMessage, "", kubernetesNodeName), node, getArgs) + testPrintNodeImpl(t, fmt.Sprintf("%s %s\t%s/%s\t%s\t%s\t%s\t%s\t%s\t%s\n", JobStatusIconMap[wfv1.NodeRunning], nodeName, nodeTemplateRefName, nodeTemplateRefName, expectedPodName, "0s", getArtifactsString(node), nodeMessage, "", kubernetesNodeName), node, getArgs) getArgs.Output = "short" - testPrintNodeImpl(t, fmt.Sprintf("%s %s\t%s/%s\t%s\t%s\t%s\t%s\n", NodeTypeIconMap[wfv1.NodeTypeSuspend], nodeName, nodeTemplateRefName, nodeTemplateRefName, nodeID, "0s", nodeMessage, kubernetesNodeName), node, getArgs) + testPrintNodeImpl(t, fmt.Sprintf("%s %s\t%s/%s\t%s\t%s\t%s\t%s\n", JobStatusIconMap[wfv1.NodeRunning], nodeName, nodeTemplateRefName, nodeTemplateRefName, expectedPodName, "0s", nodeMessage, kubernetesNodeName), node, getArgs) getArgs.Status = "foobar" testPrintNodeImpl(t, "", node, getArgs) @@ -213,6 +242,7 @@ status: finishedAt: "2020-06-02T16:04:42Z" id: many-items-z26lj-753834747 name: many-items-z26lj[0].sleep(8:eight) + phase: Succeeded startedAt: "2020-06-02T16:04:21Z" templateName: sleep type: Pod @@ -222,6 +252,7 @@ status: finishedAt: "2020-06-02T16:04:45Z" id: many-items-z26lj-1052882686 name: many-items-z26lj[0].sleep(10:ten) + phase: Succeeded startedAt: "2020-06-02T16:04:22Z" templateName: sleep type: Pod @@ -255,6 +286,7 @@ status: finishedAt: "2020-06-02T16:04:54Z" id: many-items-z26lj-1774150289 name: 
many-items-z26lj[0].sleep(3:three) + phase: Succeeded startedAt: "2020-06-02T16:04:21Z" templateName: sleep type: Pod @@ -264,6 +296,7 @@ status: finishedAt: "2020-06-02T16:04:48Z" id: many-items-z26lj-1939921510 name: many-items-z26lj[0].sleep(0:zero) + phase: Succeeded startedAt: "2020-06-02T16:04:21Z" templateName: sleep type: Pod @@ -283,6 +316,7 @@ status: finishedAt: "2020-06-02T16:04:53Z" id: many-items-z26lj-2156977535 name: many-items-z26lj[0].sleep(1:one) + phase: Succeeded startedAt: "2020-06-02T16:04:21Z" templateName: sleep type: Pod @@ -292,6 +326,7 @@ status: finishedAt: "2020-06-02T16:04:40Z" id: many-items-z26lj-2619926859 name: many-items-z26lj[0].sleep(9:nine) + phase: Succeeded startedAt: "2020-06-02T16:04:21Z" templateName: sleep type: Pod @@ -301,6 +336,7 @@ status: finishedAt: "2020-06-02T16:04:44Z" id: many-items-z26lj-3011405271 name: many-items-z26lj[0].sleep(11:eleven) + phase: Succeeded startedAt: "2020-06-02T16:04:22Z" templateName: sleep type: Pod @@ -310,6 +346,7 @@ status: finishedAt: "2020-06-02T16:04:57Z" id: many-items-z26lj-3031375822 name: many-items-z26lj[0].sleep(7:seven) + phase: Succeeded startedAt: "2020-06-02T16:04:21Z" templateName: sleep type: Pod @@ -319,6 +356,7 @@ status: finishedAt: "2020-06-02T16:04:59Z" id: many-items-z26lj-3126938806 name: many-items-z26lj[0].sleep(12:twelve) + phase: Succeeded startedAt: "2020-06-02T16:04:22Z" templateName: sleep type: Pod @@ -328,6 +366,7 @@ status: finishedAt: "2020-06-02T16:04:56Z" id: many-items-z26lj-3178865096 name: many-items-z26lj[0].sleep(6:six) + phase: Succeeded startedAt: "2020-06-02T16:04:21Z" templateName: sleep type: Pod @@ -337,6 +376,7 @@ status: finishedAt: "2020-06-02T16:04:51Z" id: many-items-z26lj-3409403178 name: many-items-z26lj[0].sleep(2:two) + phase: Succeeded startedAt: "2020-06-02T16:04:21Z" templateName: sleep type: Pod @@ -353,14 +393,24 @@ status: phase: Succeeded startedAt: "2020-06-02T16:04:21Z" `, &wf) + output := PrintWorkflowHelper(&wf, 
GetFlags{}) - assert.Contains(t, output, ` - ├─ sleep(9:nine) sleep many-items-z26lj-2619926859 19s - ├─ sleep(10:ten) sleep many-items-z26lj-1052882686 23s - ├─ sleep(11:eleven) sleep many-items-z26lj-3011405271 22s`) - assert.Contains(t, output, "This workflow does not have security context set. "+ - "You can run your workflow pods more securely by setting it.\n"+ - "Learn more at https://argoproj.github.io/argo-workflows/workflow-pod-security-context/\n") + + // derive expected pod name: + h := fnv.New32a() + _, _ = h.Write([]byte(fmt.Sprintf("%s %s", JobStatusIconMap[wfv1.NodeSucceeded], "sleep(9:nine)"))) + expectedPodName := fmt.Sprintf("many-items-z26lj-sleep-%v", h.Sum32()) + assert.Contains(t, output, fmt.Sprintf("sleep(9:nine) sleep %s 19s", expectedPodName)) + + h.Reset() + _, _ = h.Write([]byte(fmt.Sprintf("%s %s", JobStatusIconMap[wfv1.NodeSucceeded], "sleep(10:ten)"))) + expectedPodName = fmt.Sprintf("many-items-z26lj-sleep-%v", h.Sum32()) + assert.Contains(t, output, fmt.Sprintf("sleep(10:ten) sleep %s 23s", expectedPodName)) + + h.Reset() + _, _ = h.Write([]byte(fmt.Sprintf("%s %s", JobStatusIconMap[wfv1.NodeSucceeded], "sleep(11:eleven)"))) + expectedPodName = fmt.Sprintf("many-items-z26lj-sleep-%v", h.Sum32()) + assert.Contains(t, output, fmt.Sprintf("sleep(11:eleven) sleep %s 22s", expectedPodName)) }) } @@ -383,8 +433,4 @@ func Test_printWorkflowHelperNudges(t *testing.T) { output := PrintWorkflowHelper(&securedWf, GetFlags{}) assert.NotContains(t, output, securityNudges) }) - t.Run("InsecureWorkflow", func(t *testing.T) { - output := PrintWorkflowHelper(&insecureWf, GetFlags{}) - assert.Contains(t, output, securityNudges) - }) } diff --git a/cmd/argo/commands/common/submit.go b/cmd/argo/commands/common/submit.go index 77687b9467c8..60b3f43ede29 100644 --- a/cmd/argo/commands/common/submit.go +++ b/cmd/argo/commands/common/submit.go @@ -18,7 +18,8 @@ type CliSubmitOpts struct { Strict bool // --strict Priority *int32 // --priority GetArgs 
GetFlags - ScheduledTime string // --scheduled-time + ScheduledTime string // --scheduled-time + Parameters []string // --parameter } func WaitWatchOrLog(ctx context.Context, serviceClient workflowpkg.WorkflowServiceClient, namespace string, workflowNames []string, cliSubmitOpts CliSubmitOpts) { diff --git a/cmd/argo/commands/common/wait.go b/cmd/argo/commands/common/wait.go index 9e872c6129bf..69f7c6b46d47 100644 --- a/cmd/argo/commands/common/wait.go +++ b/cmd/argo/commands/common/wait.go @@ -69,7 +69,7 @@ func waitOnOne(serviceClient workflowpkg.WorkflowServiceClient, ctx context.Cont continue } wf := event.Object - if !wf.Status.FinishedAt.IsZero() { + if wf != nil && !wf.Status.FinishedAt.IsZero() { if !quiet { fmt.Printf("%s %s at %v\n", wfName, wf.Status.Phase, wf.Status.FinishedAt) } diff --git a/cmd/argo/commands/cp.go b/cmd/argo/commands/cp.go new file mode 100644 index 000000000000..d175a032b285 --- /dev/null +++ b/cmd/argo/commands/cp.go @@ -0,0 +1,140 @@ +package commands + +import ( + "crypto/tls" + "fmt" + "io" + "log" + "net/http" + "os" + "path" + "path/filepath" + "strings" + + "github.com/spf13/cobra" + + "github.com/argoproj/argo-workflows/v3/cmd/argo/commands/client" + "github.com/argoproj/argo-workflows/v3/pkg/apiclient" + workflowpkg "github.com/argoproj/argo-workflows/v3/pkg/apiclient/workflow" + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" +) + +func NewCpCommand() *cobra.Command { + var ( + namespace string // --namespace + nodeId string // --node-id + templateName string // --template-name + artifactName string // --artifact-name + customPath string // --path + ) + command := &cobra.Command{ + Use: "cp my-wf output-directory ...", + Short: "copy artifacts from workflow", + Example: `# Copy a workflow's artifacts to a local output directory: + + argo cp my-wf output-directory + +# Copy artifacts from a specific node in a workflow to a local output directory: + + argo cp my-wf output-directory 
--node-id=my-wf-node-id-123 +`, + SilenceUsage: true, + RunE: func(cmd *cobra.Command, args []string) error { + if len(args) != 2 { + cmd.HelpFunc()(cmd, args) + return fmt.Errorf("incorrect number of arguments") + } + workflowName := args[0] + outputDir := args[1] + + ctx, apiClient := client.NewAPIClient(cmd.Context()) + serviceClient := apiClient.NewWorkflowServiceClient() + if len(namespace) == 0 { + namespace = client.Namespace() + } + workflow, err := serviceClient.GetWorkflow(ctx, &workflowpkg.WorkflowGetRequest{ + Name: workflowName, + Namespace: namespace, + }) + if err != nil { + return fmt.Errorf("failed to get workflow: %w", err) + } + + workflowName = workflow.Name + artifactSearchQuery := v1alpha1.ArtifactSearchQuery{ + ArtifactName: artifactName, + TemplateName: templateName, + NodeId: nodeId, + } + artifactSearchResults := workflow.SearchArtifacts(&artifactSearchQuery) + + c := &http.Client{ + Transport: &http.Transport{ + TLSClientConfig: &tls.Config{ + InsecureSkipVerify: client.ArgoServerOpts.InsecureSkipVerify, + }, + }, + } + + for _, artifact := range artifactSearchResults { + customPath := filepath.Join(outputDir, customPath) + nodeInfo := workflow.Status.Nodes.Find(func(n v1alpha1.NodeStatus) bool { return n.ID == artifact.NodeID }) + if nodeInfo == nil { + return fmt.Errorf("could not get node status for node ID %s", artifact.NodeID) + } + customPath = strings.Replace(customPath, "{templateName}", nodeInfo.TemplateName, 1) + customPath = strings.Replace(customPath, "{namespace}", namespace, 1) + customPath = strings.Replace(customPath, "{workflowName}", workflowName, 1) + customPath = strings.Replace(customPath, "{nodeId}", artifact.NodeID, 1) + customPath = strings.Replace(customPath, "{artifactName}", artifact.Name, 1) + err = os.MkdirAll(customPath, os.ModePerm) + if err != nil { + return fmt.Errorf("failed to create folder path: %w", err) + } + key, err := artifact.GetKey() + if err != nil { + return fmt.Errorf("error getting key for 
artifact: %w", err) + } + err = getAndStoreArtifactData(namespace, workflowName, artifact.NodeID, artifact.Name, path.Base(key), customPath, c, client.ArgoServerOpts) + if err != nil { + return fmt.Errorf("failed to get and store artifact data: %w", err) + } + } + return nil + }, + } + command.Flags().StringVarP(&namespace, "namespace", "n", "", "namespace of workflow") + command.Flags().StringVar(&nodeId, "node-id", "", "id of node in workflow") + command.Flags().StringVar(&templateName, "template-name", "", "name of template in workflow") + command.Flags().StringVar(&artifactName, "artifact-name", "", "name of output artifact in workflow") + command.Flags().StringVar(&customPath, "path", "{namespace}/{workflowName}/{nodeId}/outputs/{artifactName}", "use variables {workflowName}, {nodeId}, {templateName}, {artifactName}, and {namespace} to create a customized path to store the artifacts; example: {workflowName}/{templateName}/{artifactName}") + return command +} + +func getAndStoreArtifactData(namespace string, workflowName string, nodeId string, artifactName string, fileName string, customPath string, c *http.Client, argoServerOpts apiclient.ArgoServerOpts) error { + request, err := http.NewRequest("GET", fmt.Sprintf("%s/artifacts/%s/%s/%s/%s", argoServerOpts.GetURL(), namespace, workflowName, nodeId, artifactName), nil) + if err != nil { + return fmt.Errorf("failed to create request: %w", err) + } + request.Header.Set("Authorization", client.GetAuthString()) + resp, err := c.Do(request) + if err != nil { + return fmt.Errorf("request failed with: %w", err) + } + defer resp.Body.Close() + if resp.StatusCode != 200 { + return fmt.Errorf("request failed %s", resp.Status) + } + artifactFilePath := filepath.Join(customPath, fileName) + fileWriter, err := os.Create(artifactFilePath) + if err != nil { + return fmt.Errorf("creating file failed: %w", err) + } + defer fileWriter.Close() + _, err = io.Copy(fileWriter, resp.Body) + if err != nil { + return 
fmt.Errorf("copying file contents failed: %w", err) + } + log.Printf("Created %q", fileName) + return nil +} diff --git a/cmd/argo/commands/delete.go b/cmd/argo/commands/delete.go index b8f663fa02c9..8c947697224e 100644 --- a/cmd/argo/commands/delete.go +++ b/cmd/argo/commands/delete.go @@ -22,9 +22,10 @@ func NewDeleteCommand() *cobra.Command { all bool allNamespaces bool dryRun bool + force bool ) command := &cobra.Command{ - Use: "delete [--dry-run] [WORKFLOW...|[--all] [--older] [--completed] [--resubmitted] [--prefix PREFIX] [--selector SELECTOR]]", + Use: "delete [--dry-run] [WORKFLOW...|[--all] [--older] [--completed] [--resubmitted] [--prefix PREFIX] [--selector SELECTOR] [--force] ]", Short: "delete workflows", Example: `# Delete a workflow: @@ -64,7 +65,7 @@ func NewDeleteCommand() *cobra.Command { for _, wf := range workflows { if !dryRun { - _, err := serviceClient.DeleteWorkflow(ctx, &workflowpkg.WorkflowDeleteRequest{Name: wf.Name, Namespace: wf.Namespace}) + _, err := serviceClient.DeleteWorkflow(ctx, &workflowpkg.WorkflowDeleteRequest{Name: wf.Name, Namespace: wf.Namespace, Force: force}) if err != nil && status.Code(err) == codes.NotFound { fmt.Printf("Workflow '%s' not found\n", wf.Name) continue @@ -85,7 +86,8 @@ func NewDeleteCommand() *cobra.Command { command.Flags().StringVar(&flags.prefix, "prefix", "", "Delete workflows by prefix") command.Flags().StringVar(&flags.finishedAfter, "older", "", "Delete completed workflows finished before the specified duration (e.g. 10m, 3h, 1d)") command.Flags().StringVarP(&flags.labels, "selector", "l", "", "Selector (label query) to filter on, not including uninitialized ones, supports '=', '==', and '!='.(e.g. -l key1=value1,key2=value2)") - command.Flags().StringVar(&flags.fields, "field-selector", "", "Selector (field query) to filter on, supports '=', '==', and '!='.(e.g. --field-selectorkey1=value1,key2=value2). 
The server only supports a limited number of field queries per type.") + command.Flags().StringVar(&flags.fields, "field-selector", "", "Selector (field query) to filter on, supports '=', '==', and '!='.(e.g. --field-selector key1=value1,key2=value2). The server only supports a limited number of field queries per type.") command.Flags().BoolVar(&dryRun, "dry-run", false, "Do not delete the workflow, only print what would happen") + command.Flags().BoolVar(&force, "force", false, "Force delete workflows by removing finalizers") return command } diff --git a/cmd/argo/commands/list.go b/cmd/argo/commands/list.go index 024ca4955979..ce524f3e4f18 100644 --- a/cmd/argo/commands/list.go +++ b/cmd/argo/commands/list.go @@ -90,7 +90,7 @@ func NewListCommand() *cobra.Command { command.Flags().Int64VarP(&listArgs.chunkSize, "chunk-size", "", 0, "Return large lists in chunks rather than all at once. Pass 0 to disable.") command.Flags().BoolVar(&listArgs.noHeaders, "no-headers", false, "Don't print headers (default print headers).") command.Flags().StringVarP(&listArgs.labels, "selector", "l", "", "Selector (label query) to filter on, not including uninitialized ones, supports '=', '==', and '!='.(e.g. -l key1=value1,key2=value2)") - command.Flags().StringVar(&listArgs.fields, "field-selector", "", "Selector (field query) to filter on, supports '=', '==', and '!='.(e.g. --field-selectorkey1=value1,key2=value2). The server only supports a limited number of field queries per type.") + command.Flags().StringVar(&listArgs.fields, "field-selector", "", "Selector (field query) to filter on, supports '=', '==', and '!='.(e.g. --field-selector key1=value1,key2=value2). 
The server only supports a limited number of field queries per type.") return command } diff --git a/cmd/argo/commands/resubmit.go b/cmd/argo/commands/resubmit.go index 9aa1d645a374..d643dd2c8373 100644 --- a/cmd/argo/commands/resubmit.go +++ b/cmd/argo/commands/resubmit.go @@ -82,6 +82,7 @@ func NewResubmitCommand() *cobra.Command { }, } + command.Flags().StringArrayVarP(&cliSubmitOpts.Parameters, "parameter", "p", []string{}, "input parameter to override on the original workflow spec") command.Flags().Int32Var(&resubmitOpts.priority, "priority", 0, "workflow priority") command.Flags().StringVarP(&cliSubmitOpts.Output, "output", "o", "", "Output format. One of: name|json|yaml|wide") command.Flags().BoolVarP(&cliSubmitOpts.Wait, "wait", "w", false, "wait for the workflow to complete, only works when a single workflow is resubmitted") @@ -130,9 +131,10 @@ func resubmitWorkflows(ctx context.Context, serviceClient workflowpkg.WorkflowSe resubmittedNames[wf.Name] = true lastResubmitted, err = serviceClient.ResubmitWorkflow(ctx, &workflowpkg.WorkflowResubmitRequest{ - Namespace: wf.Namespace, - Name: wf.Name, - Memoized: resubmitOpts.memoized, + Namespace: wf.Namespace, + Name: wf.Name, + Memoized: resubmitOpts.memoized, + Parameters: cliSubmitOpts.Parameters, }) if err != nil { return err diff --git a/cmd/argo/commands/retry.go b/cmd/argo/commands/retry.go index 5f2d2b01b6b0..32f05ab81d2a 100644 --- a/cmd/argo/commands/retry.go +++ b/cmd/argo/commands/retry.go @@ -85,6 +85,7 @@ func NewRetryCommand() *cobra.Command { errors.CheckError(err) }, } + command.Flags().StringArrayVarP(&cliSubmitOpts.Parameters, "parameter", "p", []string{}, "input parameter to override on the original workflow spec") command.Flags().StringVarP(&cliSubmitOpts.Output, "output", "o", "", "Output format. 
One of: name|json|yaml|wide") command.Flags().BoolVarP(&cliSubmitOpts.Wait, "wait", "w", false, "wait for the workflow to complete, only works when a single workflow is retried") command.Flags().BoolVar(&cliSubmitOpts.Watch, "watch", false, "watch the workflow until it completes, only works when a single workflow is retried") @@ -137,6 +138,7 @@ func retryWorkflows(ctx context.Context, serviceClient workflowpkg.WorkflowServi Namespace: wf.Namespace, RestartSuccessful: retryOpts.restartSuccessful, NodeFieldSelector: selector.String(), + Parameters: cliSubmitOpts.Parameters, }) if err != nil { return err diff --git a/cmd/argo/commands/root.go b/cmd/argo/commands/root.go index 42ed3d1d248d..5df892b72488 100644 --- a/cmd/argo/commands/root.go +++ b/cmd/argo/commands/root.go @@ -103,6 +103,7 @@ If your server is behind an ingress with a path (you'll be running "argo server command.AddCommand(auth.NewAuthCommand()) command.AddCommand(NewWaitCommand()) command.AddCommand(NewWatchCommand()) + command.AddCommand(NewCpCommand()) command.AddCommand(NewStopCommand()) command.AddCommand(NewNodeCommand()) command.AddCommand(NewTerminateCommand()) diff --git a/cmd/argo/commands/server.go b/cmd/argo/commands/server.go index b343ae36e252..bb91edc767d9 100644 --- a/cmd/argo/commands/server.go +++ b/cmd/argo/commands/server.go @@ -35,6 +35,7 @@ import ( "github.com/argoproj/argo-workflows/v3/util/help" pprofutil "github.com/argoproj/argo-workflows/v3/util/pprof" tlsutils "github.com/argoproj/argo-workflows/v3/util/tls" + "github.com/argoproj/argo-workflows/v3/workflow/common" ) func NewServerCommand() *cobra.Command { @@ -48,13 +49,14 @@ func NewServerCommand() *cobra.Command { htst bool namespaced bool // --namespaced managedNamespace string // --managed-namespace - ssoNamespace string enableOpenBrowser bool eventOperationQueueSize int eventWorkerCount int eventAsyncDispatch bool frameOptions string accessControlAllowOrigin string + apiRateLimit uint64 + allowedLinkProtocol []string 
logFormat string // --log-format ) @@ -97,10 +99,16 @@ See %s`, help.ArgoServer), managedNamespace = namespace } + ssoNamespace := namespace + if managedNamespace != "" { + ssoNamespace = managedNamespace + } + log.WithFields(log.Fields{ "authModes": authModes, "namespace": namespace, "managedNamespace": managedNamespace, + "ssoNamespace": ssoNamespace, "baseHRef": baseHRef, "secure": secure, }).Info() @@ -142,39 +150,25 @@ See %s`, help.ArgoServer), log.Warn("You are running without client authentication. Learn how to enable client authentication: https://argoproj.github.io/argo-workflows/argo-server-auth-mode/") } - if namespaced { - // Case 1: If ssoNamespace is not specified, default it to installation namespace - if ssoNamespace == "" { - ssoNamespace = namespace - } - // Case 2: If ssoNamespace is not equal to installation or managed namespace, default it to installation namespace - if ssoNamespace != namespace && ssoNamespace != managedNamespace { - log.Warn("--sso-namespace should be equal to --managed-namespace or the installation namespace") - ssoNamespace = namespace - } - } else { - if ssoNamespace != "" { - log.Warn("ignoring --sso-namespace because --namespaced is false") - } - ssoNamespace = namespace - } opts := apiserver.ArgoServerOpts{ BaseHRef: baseHRef, TLSConfig: tlsConfig, HSTS: htst, Namespaced: namespaced, Namespace: namespace, - SSONameSpace: ssoNamespace, Clients: clients, RestConfig: config, AuthModes: modes, ManagedNamespace: managedNamespace, + SSONamespace: ssoNamespace, ConfigName: configMap, EventOperationQueueSize: eventOperationQueueSize, EventWorkerCount: eventWorkerCount, EventAsyncDispatch: eventAsyncDispatch, XFrameOptions: frameOptions, AccessControlAllowOrigin: accessControlAllowOrigin, + APIRateLimit: apiRateLimit, + AllowedLinkProtocol: allowedLinkProtocol, } browserOpenFunc := func(url string) {} if enableOpenBrowser { @@ -214,22 +208,28 @@ See %s`, help.ArgoServer), defaultBaseHRef = "/" } + defaultAllowedLinkProtocol := 
[]string{"http", "https"} + if protocol := os.Getenv("ALLOWED_LINK_PROTOCOL"); protocol != "" { + defaultAllowedLinkProtocol = strings.Split(protocol, ",") + } + command.Flags().IntVarP(&port, "port", "p", 2746, "Port to listen on") command.Flags().StringVar(&baseHRef, "basehref", defaultBaseHRef, "Value for base href in index.html. Used if the server is running behind reverse proxy under subpath different from /. Defaults to the environment variable BASE_HREF.") // "-e" for encrypt, like zip command.Flags().BoolVarP(&secure, "secure", "e", true, "Whether or not we should listen on TLS.") command.Flags().BoolVar(&htst, "hsts", true, "Whether or not we should add a HTTP Secure Transport Security header. This only has effect if secure is enabled.") command.Flags().StringArrayVar(&authModes, "auth-mode", []string{"client"}, "API server authentication mode. Any 1 or more length permutation of: client,server,sso") - command.Flags().StringVar(&configMap, "configmap", "workflow-controller-configmap", "Name of K8s configmap to retrieve workflow controller configuration") + command.Flags().StringVar(&configMap, "configmap", common.ConfigMapName, "Name of K8s configmap to retrieve workflow controller configuration") command.Flags().BoolVar(&namespaced, "namespaced", false, "run as namespaced mode") command.Flags().StringVar(&managedNamespace, "managed-namespace", "", "namespace that watches, default to the installation namespace") - command.Flags().StringVar(&ssoNamespace, "sso-namespace", "", "namespace that will be used for SSO RBAC. Defaults to installation namespace. 
Used only in namespaced mode") command.Flags().BoolVarP(&enableOpenBrowser, "browser", "b", false, "enable automatic launching of the browser [local mode]") command.Flags().IntVar(&eventOperationQueueSize, "event-operation-queue-size", 16, "how many events operations that can be queued at once") command.Flags().IntVar(&eventWorkerCount, "event-worker-count", 4, "how many event workers to run") command.Flags().BoolVar(&eventAsyncDispatch, "event-async-dispatch", false, "dispatch event async") command.Flags().StringVar(&frameOptions, "x-frame-options", "DENY", "Set X-Frame-Options header in HTTP responses.") command.Flags().StringVar(&accessControlAllowOrigin, "access-control-allow-origin", "", "Set Access-Control-Allow-Origin header in HTTP responses.") + command.Flags().Uint64Var(&apiRateLimit, "api-rate-limit", 1000, "Set limit per IP for api ratelimiter") + command.Flags().StringArrayVar(&allowedLinkProtocol, "allowed-link-protocol", defaultAllowedLinkProtocol, "Allowed link protocol in configMap. Used if the allowed configMap links protocol are different from http,https. Defaults to the environment variable ALLOWED_LINK_PROTOCOL") command.Flags().StringVar(&logFormat, "log-format", "text", "The formatter to use for logs. One of: text|json") viper.AutomaticEnv() diff --git a/cmd/argo/commands/terminate.go b/cmd/argo/commands/terminate.go index f21ee0aa51a2..c20b75d374be 100644 --- a/cmd/argo/commands/terminate.go +++ b/cmd/argo/commands/terminate.go @@ -103,7 +103,7 @@ func NewTerminateCommand() *cobra.Command { } command.Flags().StringVarP(&t.labels, "selector", "l", "", "Selector (label query) to filter on, not including uninitialized ones, supports '=', '==', and '!='.(e.g. -l key1=value1,key2=value2)") - command.Flags().StringVar(&t.fields, "field-selector", "", "Selector (field query) to filter on, supports '=', '==', and '!='.(e.g. --field-selectorkey1=value1,key2=value2). 
The server only supports a limited number of field queries per type.") + command.Flags().StringVar(&t.fields, "field-selector", "", "Selector (field query) to filter on, supports '=', '==', and '!='.(e.g. --field-selector key1=value1,key2=value2). The server only supports a limited number of field queries per type.") command.Flags().BoolVar(&t.dryRun, "dry-run", false, "Do not terminate the workflow, only print what would happen") return command } diff --git a/cmd/argo/lint/formatter_pretty.go b/cmd/argo/lint/formatter_pretty.go index 2b3ceacaec13..1d3807aed500 100644 --- a/cmd/argo/lint/formatter_pretty.go +++ b/cmd/argo/lint/formatter_pretty.go @@ -4,7 +4,7 @@ import ( "fmt" "strings" - "github.com/TwinProduction/go-color" + "github.com/TwiN/go-color" ) const ( diff --git a/cmd/argo/lint/formatter_pretty_test.go b/cmd/argo/lint/formatter_pretty_test.go index 848bb4120c81..310c79ba9d47 100644 --- a/cmd/argo/lint/formatter_pretty_test.go +++ b/cmd/argo/lint/formatter_pretty_test.go @@ -4,7 +4,7 @@ import ( "fmt" "testing" - "github.com/TwinProduction/go-color" + "github.com/TwiN/go-color" "github.com/stretchr/testify/assert" ) diff --git a/cmd/argoexec/commands/artifact/delete.go b/cmd/argoexec/commands/artifact/delete.go new file mode 100644 index 000000000000..39ee4fd7decd --- /dev/null +++ b/cmd/argoexec/commands/artifact/delete.go @@ -0,0 +1,131 @@ +package artifact + +import ( + "context" + "encoding/json" + "fmt" + "os" + "path/filepath" + + "github.com/spf13/cobra" + + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/types" + + "github.com/argoproj/argo-workflows/v3/cmd/argo/commands/client" + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + workflow "github.com/argoproj/argo-workflows/v3/pkg/client/clientset/versioned" + wfv1alpha1 "github.com/argoproj/argo-workflows/v3/pkg/client/clientset/versioned/typed/workflow/v1alpha1" + executor "github.com/argoproj/argo-workflows/v3/workflow/artifacts" + 
"github.com/argoproj/argo-workflows/v3/workflow/common" +) + +func NewArtifactDeleteCommand() *cobra.Command { + return &cobra.Command{ + Use: "delete", + SilenceUsage: true, + RunE: func(cmd *cobra.Command, args []string) error { + + namespace := client.Namespace() + clientConfig := client.GetConfig() + + if podName, ok := os.LookupEnv(common.EnvVarArtifactPodName); ok { + + config, err := clientConfig.ClientConfig() + workflowInterface := workflow.NewForConfigOrDie(config) + if err != nil { + return err + } + + artifactGCTaskInterface := workflowInterface.ArgoprojV1alpha1().WorkflowArtifactGCTasks(namespace) + labelSelector := fmt.Sprintf("%s = %s", common.LabelKeyArtifactGCPodName, podName) + + err = deleteArtifacts(labelSelector, cmd.Context(), artifactGCTaskInterface) + if err != nil { + return err + } + } + return nil + }, + } +} + +func deleteArtifacts(labelSelector string, ctx context.Context, artifactGCTaskInterface wfv1alpha1.WorkflowArtifactGCTaskInterface) error { + + taskList, err := artifactGCTaskInterface.List(context.Background(), metav1.ListOptions{LabelSelector: labelSelector}) + if err != nil { + return err + } + + for _, task := range taskList.Items { + task.Status.ArtifactResultsByNode = make(map[string]v1alpha1.ArtifactResultNodeStatus) + for nodeName, artifactNodeSpec := range task.Spec.ArtifactsByNode { + + var archiveLocation *v1alpha1.ArtifactLocation + artResultNodeStatus := v1alpha1.ArtifactResultNodeStatus{ArtifactResults: make(map[string]v1alpha1.ArtifactResult)} + if artifactNodeSpec.ArchiveLocation != nil { + archiveLocation = artifactNodeSpec.ArchiveLocation + } + + var resources resources + resources.Files = make(map[string][]byte) // same resources for every artifact + for _, artifact := range artifactNodeSpec.Artifacts { + if archiveLocation != nil { + err := artifact.Relocate(archiveLocation) + if err != nil { + return err + } + } + + drv, err := executor.NewDriver(ctx, &artifact, resources) + if err != nil { + return err + } + 
+ err = drv.Delete(&artifact) + if err != nil { + errString := err.Error() + artResultNodeStatus.ArtifactResults[artifact.Name] = v1alpha1.ArtifactResult{Name: artifact.Name, Success: false, Error: &errString} + } else { + artResultNodeStatus.ArtifactResults[artifact.Name] = v1alpha1.ArtifactResult{Name: artifact.Name, Success: true, Error: nil} + } + } + + task.Status.ArtifactResultsByNode[nodeName] = artResultNodeStatus + } + patch, err := json.Marshal(map[string]interface{}{"status": v1alpha1.ArtifactGCStatus{ArtifactResultsByNode: task.Status.ArtifactResultsByNode}}) + if err != nil { + return err + } + _, err = artifactGCTaskInterface.Patch(context.Background(), task.Name, types.MergePatchType, patch, metav1.PatchOptions{}, "status") + if err != nil { + return err + } + } + + return nil +} + +type resources struct { + Files map[string][]byte +} + +func (r resources) GetSecret(ctx context.Context, name, key string) (string, error) { + + path := filepath.Join(common.SecretVolMountPath, name, key) + if file, ok := r.Files[path]; ok { + return string(file), nil + } + + file, err := os.ReadFile(path) + if err != nil { + return "", err + } else { + r.Files[path] = file + return string(file), err + } +} + +func (r resources) GetConfigMapKey(ctx context.Context, name, key string) (string, error) { + return "", fmt.Errorf("not supported") +} diff --git a/cmd/argoexec/commands/artifact/root.go b/cmd/argoexec/commands/artifact/root.go new file mode 100644 index 000000000000..898f412eaa4c --- /dev/null +++ b/cmd/argoexec/commands/artifact/root.go @@ -0,0 +1,13 @@ +package artifact + +import ( + "github.com/spf13/cobra" +) + +func NewArtifactCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "artifact", + } + cmd.AddCommand(NewArtifactDeleteCommand()) + return cmd +} diff --git a/cmd/argoexec/commands/emissary.go b/cmd/argoexec/commands/emissary.go index b884749a21b1..4ad2caf015ef 100644 --- a/cmd/argoexec/commands/emissary.go +++ 
b/cmd/argoexec/commands/emissary.go @@ -16,6 +16,8 @@ import ( "syscall" "time" + "github.com/argoproj/argo-workflows/v3/util/errors" + log "github.com/sirupsen/logrus" "github.com/spf13/cobra" "k8s.io/client-go/util/retry" @@ -60,18 +62,6 @@ func NewEmissaryCommand() *cobra.Command { name, args := args[0], args[1:] - signals := make(chan os.Signal, 1) - defer close(signals) - signal.Notify(signals) - defer signal.Reset() - go func() { - for s := range signals { - if !osspecific.IsSIGCHLD(s) { - _ = osspecific.Kill(-os.Getpid(), s.(syscall.Signal)) - } - } - }() - data, err := ioutil.ReadFile(varRunArgo + "/template") if err != nil { return fmt.Errorf("failed to read template: %w", err) @@ -127,25 +117,28 @@ func NewEmissaryCommand() *cobra.Command { return fmt.Errorf("failed to get retry strategy: %w", err) } - var command *exec.Cmd - var stdout *os.File - var combined *os.File cmdErr := retry.OnError(backoff, func(error) bool { return true }, func() error { - if stdout != nil { - stdout.Close() - } - if combined != nil { - combined.Close() - } - command, stdout, combined, err = createCommand(name, args, template) + command, stdout, combined, err := createCommand(name, args, template) if err != nil { return fmt.Errorf("failed to create command: %w", err) } - + defer stdout.Close() + defer combined.Close() + signals := make(chan os.Signal, 1) + defer close(signals) + signal.Notify(signals) + defer signal.Reset() if err := command.Start(); err != nil { return err } - + go func() { + for s := range signals { + if !osspecific.IsSIGCHLD(s) { + _ = osspecific.Kill(command.Process.Pid, s.(syscall.Signal)) + } + } + }() + pid := command.Process.Pid ctx, cancel := context.WithCancel(context.Background()) defer cancel() go func() { @@ -158,16 +151,16 @@ func NewEmissaryCommand() *cobra.Command { _ = os.Remove(varRunArgo + "/ctr/" + containerName + "/signal") s, _ := strconv.Atoi(string(data)) if s > 0 { - _ = osspecific.Kill(command.Process.Pid, syscall.Signal(s)) + _ = 
osspecific.Kill(pid, syscall.Signal(s)) } time.Sleep(2 * time.Second) } } }() - return command.Wait() + return osspecific.Wait(command.Process) + }) - defer stdout.Close() - defer combined.Close() + logger.WithError(err).Info("sub-process exited") if _, ok := os.LookupEnv("ARGO_DEBUG_PAUSE_AFTER"); ok { for { @@ -184,7 +177,7 @@ func NewEmissaryCommand() *cobra.Command { if cmdErr == nil { exitCode = 0 - } else if exitError, ok := cmdErr.(*exec.ExitError); ok { + } else if exitError, ok := cmdErr.(errors.Exited); ok { if exitError.ExitCode() >= 0 { exitCode = exitError.ExitCode() } else { diff --git a/cmd/argoexec/commands/emissary_test.go b/cmd/argoexec/commands/emissary_test.go index 3338137b05d5..2c2d56306579 100644 --- a/cmd/argoexec/commands/emissary_test.go +++ b/cmd/argoexec/commands/emissary_test.go @@ -1,15 +1,15 @@ package commands import ( + "fmt" "io/ioutil" "os" - "os/exec" - "path/filepath" "strconv" "sync" "syscall" "testing" - "time" + + "github.com/argoproj/argo-workflows/v3/util/errors" "github.com/stretchr/testify/assert" ) @@ -20,16 +20,11 @@ func TestEmissary(t *testing.T) { varRunArgo = tmp includeScriptOutput = true - wd, err := os.Getwd() - assert.NoError(t, err) - - x := filepath.Join(wd, "../../../dist/argosay") - - err = ioutil.WriteFile(varRunArgo+"/template", []byte(`{}`), 0o600) + err := ioutil.WriteFile(varRunArgo+"/template", []byte(`{}`), 0o600) assert.NoError(t, err) t.Run("Exit0", func(t *testing.T) { - err := run(x, []string{"exit"}) + err := run("exit") assert.NoError(t, err) data, err := ioutil.ReadFile(varRunArgo + "/ctr/main/exitcode") assert.NoError(t, err) @@ -37,28 +32,28 @@ func TestEmissary(t *testing.T) { }) t.Run("Exit1", func(t *testing.T) { - err := run(x, []string{"exit", "1"}) - assert.Equal(t, 1, err.(*exec.ExitError).ExitCode()) + err := run("exit 1") + assert.Equal(t, 1, err.(errors.Exited).ExitCode()) data, err := ioutil.ReadFile(varRunArgo + "/ctr/main/exitcode") assert.NoError(t, err) assert.Equal(t, "1", 
string(data)) }) t.Run("Stdout", func(t *testing.T) { - err := run(x, []string{"echo", "hello", "/dev/stdout"}) + err := run("echo hello") assert.NoError(t, err) data, err := ioutil.ReadFile(varRunArgo + "/ctr/main/stdout") assert.NoError(t, err) assert.Contains(t, string(data), "hello") }) t.Run("Comined", func(t *testing.T) { - err := run(x, []string{"echo", "hello", "/dev/stderr"}) + err := run("echo hello > /dev/stderr") assert.NoError(t, err) data, err := ioutil.ReadFile(varRunArgo + "/ctr/main/combined") assert.NoError(t, err) assert.Contains(t, string(data), "hello") }) t.Run("Signal", func(t *testing.T) { - for signal, message := range map[syscall.Signal]string{ + for signal := range map[syscall.Signal]string{ syscall.SIGTERM: "terminated", syscall.SIGKILL: "killed", } { @@ -68,10 +63,10 @@ func TestEmissary(t *testing.T) { wg.Add(1) go func() { defer wg.Done() - err := run(x, []string{"sleep", "5s"}) - assert.EqualError(t, err, "signal: "+message) + err := run("sleep 3") + assert.EqualError(t, err, fmt.Sprintf("exit status %d", 128+signal)) }() - time.Sleep(time.Second) + wg.Wait() } }) t.Run("Artifact", func(t *testing.T) { @@ -85,7 +80,7 @@ func TestEmissary(t *testing.T) { } `), 0o600) assert.NoError(t, err) - err := run(x, []string{"echo", "hello", "/tmp/artifact"}) + err := run("echo hello > /tmp/artifact") assert.NoError(t, err) data, err := ioutil.ReadFile(varRunArgo + "/outputs/artifacts/tmp/artifact.tgz") assert.NoError(t, err) @@ -102,7 +97,7 @@ func TestEmissary(t *testing.T) { } `), 0o600) assert.NoError(t, err) - err := run(x, []string{"echo", "hello", "/tmp/artifact"}) + err := run("echo hello > /tmp/artifact") assert.NoError(t, err) data, err := ioutil.ReadFile(varRunArgo + "/outputs/artifacts/tmp/artifact.tgz") assert.NoError(t, err) @@ -121,7 +116,7 @@ func TestEmissary(t *testing.T) { } `), 0o600) assert.NoError(t, err) - err := run(x, []string{"echo", "hello", "/tmp/parameter"}) + err := run("echo hello > /tmp/parameter") 
assert.NoError(t, err) data, err := ioutil.ReadFile(varRunArgo + "/outputs/parameters/tmp/parameter") assert.NoError(t, err) @@ -151,7 +146,7 @@ func TestEmissary(t *testing.T) { `), 0o600) assert.NoError(t, err) _ = os.Remove("test.txt") - err = run(x, []string{"sh", "./test/containerSetRetryTest.sh", "/tmp/artifact"}) + err = run("sh ./test/containerSetRetryTest.sh /tmp/artifact") assert.Error(t, err) data, err := ioutil.ReadFile(varRunArgo + "/outputs/artifacts/tmp/artifact.tgz") assert.NoError(t, err) @@ -181,7 +176,7 @@ func TestEmissary(t *testing.T) { `), 0o600) assert.NoError(t, err) _ = os.Remove("test.txt") - err = run(x, []string{"sh", "./test/containerSetRetryTest.sh", "/tmp/artifact"}) + err = run("sh ./test/containerSetRetryTest.sh /tmp/artifact") assert.NoError(t, err) data, err := ioutil.ReadFile(varRunArgo + "/outputs/artifacts/tmp/artifact.tgz") assert.NoError(t, err) @@ -189,8 +184,8 @@ func TestEmissary(t *testing.T) { }) } -func run(name string, args []string) error { +func run(script string) error { cmd := NewEmissaryCommand() containerName = "main" - return cmd.RunE(cmd, append([]string{name}, args...)) + return cmd.RunE(cmd, append([]string{"sh", "-c"}, script)) } diff --git a/cmd/argoexec/commands/kill.go b/cmd/argoexec/commands/kill.go new file mode 100644 index 000000000000..05ebcadf5cde --- /dev/null +++ b/cmd/argoexec/commands/kill.go @@ -0,0 +1,37 @@ +package commands + +import ( + "fmt" + "os" + "strconv" + "syscall" + + "github.com/spf13/cobra" +) + +func NewKillCommand() *cobra.Command { + return &cobra.Command{ + Use: "kill SIGNAL PID", + SilenceUsage: true, + RunE: func(cmd *cobra.Command, args []string) error { + signum, err := strconv.Atoi(args[0]) + if err != nil { + return err + } + pid, err := strconv.Atoi(args[1]) + if err != nil { + return err + } + sig := syscall.Signal(signum) + p, err := os.FindProcess(pid) + if err != nil { + return err + } + fmt.Printf("killing %d with %v\n", pid, sig) + if err := p.Signal(sig); err != 
nil { + return err + } + return nil + }, + } +} diff --git a/cmd/argoexec/commands/resource.go b/cmd/argoexec/commands/resource.go index fc3a6d84d3a8..94ca7686a945 100644 --- a/cmd/argoexec/commands/resource.go +++ b/cmd/argoexec/commands/resource.go @@ -45,8 +45,18 @@ func execResource(ctx context.Context, action string) error { wfExecutor.AddError(err) return err } + manifestPath := common.ExecutorResourceManifestPath + if wfExecutor.Template.Resource.ManifestFrom != nil { + targetArtName := wfExecutor.Template.Resource.ManifestFrom.Artifact.Name + for _, art := range wfExecutor.Template.Inputs.Artifacts { + if art.Name == targetArtName { + manifestPath = art.Path + break + } + } + } resourceNamespace, resourceName, selfLink, err := wfExecutor.ExecResource( - action, common.ExecutorResourceManifestPath, wfExecutor.Template.Resource.Flags, + action, manifestPath, wfExecutor.Template.Resource.Flags, ) if err != nil { wfExecutor.AddError(err) diff --git a/cmd/argoexec/commands/root.go b/cmd/argoexec/commands/root.go index f492c2e6e384..c01aa58c02dc 100644 --- a/cmd/argoexec/commands/root.go +++ b/cmd/argoexec/commands/root.go @@ -16,6 +16,7 @@ import ( "k8s.io/client-go/tools/clientcmd" "github.com/argoproj/argo-workflows/v3" + "github.com/argoproj/argo-workflows/v3/cmd/argoexec/commands/artifact" wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" "github.com/argoproj/argo-workflows/v3/pkg/client/clientset/versioned" "github.com/argoproj/argo-workflows/v3/util" @@ -23,10 +24,7 @@ import ( "github.com/argoproj/argo-workflows/v3/util/logs" "github.com/argoproj/argo-workflows/v3/workflow/common" "github.com/argoproj/argo-workflows/v3/workflow/executor" - "github.com/argoproj/argo-workflows/v3/workflow/executor/docker" "github.com/argoproj/argo-workflows/v3/workflow/executor/emissary" - "github.com/argoproj/argo-workflows/v3/workflow/executor/kubelet" - "github.com/argoproj/argo-workflows/v3/workflow/executor/pns" ) const ( @@ -63,10 +61,12 @@ func 
NewRootCommand() *cobra.Command { command.AddCommand(NewAgentCommand()) command.AddCommand(NewEmissaryCommand()) command.AddCommand(NewInitCommand()) + command.AddCommand(NewKillCommand()) command.AddCommand(NewResourceCommand()) command.AddCommand(NewWaitCommand()) command.AddCommand(NewDataCommand()) command.AddCommand(cmd.NewVersionCmd(CLIName)) + command.AddCommand(artifact.NewArtifactCommand()) clientConfig = kubecli.AddKubectlFlagsToCmd(&command) command.PersistentFlags().StringVar(&logLevel, "loglevel", "info", "Set the logging level. One of: debug|info|warn|error") @@ -78,11 +78,10 @@ func NewRootCommand() *cobra.Command { func initExecutor() *executor.WorkflowExecutor { version := argo.GetVersion() - executorType := os.Getenv(common.EnvVarContainerRuntimeExecutor) - log.WithFields(log.Fields{"version": version.Version, "executorType": executorType}).Info("Starting Workflow Executor") + log.WithFields(log.Fields{"version": version.Version}).Info("Starting Workflow Executor") config, err := clientConfig.ClientConfig() checkErr(err) - config = restclient.AddUserAgent(config, fmt.Sprintf("argo-workflows/%s argo-executor/%s", version.Version, executorType)) + config = restclient.AddUserAgent(config, fmt.Sprintf("argo-workflows/%s argo-executor", version.Version)) logs.AddK8SLogTransportWrapper(config) // lets log all request as we should typically do < 5 per pod, so this is will show up problems @@ -110,18 +109,7 @@ func initExecutor() *executor.WorkflowExecutor { annotationPatchTickDuration, _ := time.ParseDuration(os.Getenv(common.EnvVarProgressPatchTickDuration)) progressFileTickDuration, _ := time.ParseDuration(os.Getenv(common.EnvVarProgressFileTickDuration)) - var cre executor.ContainerRuntimeExecutor - log.Infof("Creating a %s executor", executorType) - switch executorType { - case common.ContainerRuntimeExecutorKubelet: - cre, err = kubelet.NewKubeletExecutor(namespace, podName) - case common.ContainerRuntimeExecutorPNS: - cre, err = 
pns.NewPNSExecutor(clientset, podName, namespace) - case common.ContainerRuntimeExecutorDocker: - cre, err = docker.NewDockerExecutor(namespace, podName) - default: - cre, err = emissary.New() - } + cre, err := emissary.New() checkErr(err) wfExecutor := executor.NewExecutor( @@ -129,10 +117,10 @@ func initExecutor() *executor.WorkflowExecutor { versioned.NewForConfigOrDie(config).ArgoprojV1alpha1().WorkflowTaskResults(namespace), restClient, podName, + types.UID(os.Getenv(common.EnvVarPodUID)), os.Getenv(common.EnvVarWorkflowName), os.Getenv(common.EnvVarNodeID), namespace, - types.UID(os.Getenv(common.EnvVarWorkflowUID)), cre, *tmpl, includeScriptOutput, diff --git a/cmd/argoexec/commands/wait.go b/cmd/argoexec/commands/wait.go index 075c4a1ab2fc..c3b66e39e4fa 100644 --- a/cmd/argoexec/commands/wait.go +++ b/cmd/argoexec/commands/wait.go @@ -2,6 +2,8 @@ package commands import ( "context" + "os/signal" + "syscall" "time" "github.com/argoproj/pkg/stats" @@ -30,27 +32,24 @@ func waitContainer(ctx context.Context) error { defer stats.LogStats() stats.StartStatsTicker(5 * time.Minute) - defer func() { - if err := wfExecutor.KillSidecars(ctx); err != nil { + // use a block to constrain the scope of ctx + { + // this allows us to gracefully shutdown, capturing artifacts + ctx, cancel := signal.NotifyContext(ctx, syscall.SIGTERM) + defer cancel() + + // Wait for main container to complete + err := wfExecutor.Wait(ctx) + if err != nil { wfExecutor.AddError(err) } - }() - - // Wait for main container to complete - err := wfExecutor.Wait(ctx) - if err != nil { - wfExecutor.AddError(err) } // Capture output script result - err = wfExecutor.CaptureScriptResult(ctx) - if err != nil { - wfExecutor.AddError(err) - } - // Saving logs - logArt, err := wfExecutor.SaveLogs(ctx) + err := wfExecutor.CaptureScriptResult(ctx) if err != nil { wfExecutor.AddError(err) } + // Saving output parameters err = wfExecutor.SaveParameters(ctx) if err != nil { @@ -61,11 +60,7 @@ func 
waitContainer(ctx context.Context) error { if err != nil { wfExecutor.AddError(err) } - // Annotating pod with output - err = wfExecutor.ReportOutputs(ctx, logArt) - if err != nil { - wfExecutor.AddError(err) - } + wfExecutor.SaveLogs(ctx) return wfExecutor.HasError() } diff --git a/cmd/argoexec/main.go b/cmd/argoexec/main.go index e13d6d4329eb..1bd6c581bc19 100644 --- a/cmd/argoexec/main.go +++ b/cmd/argoexec/main.go @@ -2,7 +2,8 @@ package main import ( "os" - "os/exec" + + "github.com/argoproj/argo-workflows/v3/util/errors" // load authentication plugin for obtaining credentials from cloud providers. _ "k8s.io/client-go/plugin/pkg/client/auth" @@ -14,7 +15,7 @@ import ( func main() { err := commands.NewRootCommand().Execute() if err != nil { - if exitError, ok := err.(*exec.ExitError); ok { + if exitError, ok := err.(errors.Exited); ok { if exitError.ExitCode() >= 0 { os.Exit(exitError.ExitCode()) } else { diff --git a/cmd/workflow-controller/main.go b/cmd/workflow-controller/main.go index 357ffc21ca73..9b8a3964c53a 100644 --- a/cmd/workflow-controller/main.go +++ b/cmd/workflow-controller/main.go @@ -33,6 +33,7 @@ import ( "github.com/argoproj/argo-workflows/v3/util/env" "github.com/argoproj/argo-workflows/v3/util/logs" pprofutil "github.com/argoproj/argo-workflows/v3/util/pprof" + "github.com/argoproj/argo-workflows/v3/workflow/common" "github.com/argoproj/argo-workflows/v3/workflow/controller" "github.com/argoproj/argo-workflows/v3/workflow/events" "github.com/argoproj/argo-workflows/v3/workflow/metrics" @@ -109,7 +110,7 @@ func NewRootCommand() *cobra.Command { ctx, cancel := context.WithCancel(context.Background()) defer cancel() - wfController, err := controller.NewWorkflowController(ctx, config, kubeclientset, wfclientset, namespace, managedNamespace, executorImage, executorImagePullPolicy, containerRuntimeExecutor, configMap, executorPlugins) + wfController, err := controller.NewWorkflowController(ctx, config, kubeclientset, wfclientset, namespace, 
managedNamespace, executorImage, executorImagePullPolicy, logFormat, containerRuntimeExecutor, configMap, executorPlugins) errors.CheckError(err) leaderElectionOff := os.Getenv("LEADER_ELECTION_DISABLE") @@ -165,7 +166,7 @@ func NewRootCommand() *cobra.Command { clientConfig = kubecli.AddKubectlFlagsToCmd(&command) command.AddCommand(cmdutil.NewVersionCmd(CLIName)) - command.Flags().StringVar(&configMap, "configmap", "workflow-controller-configmap", "Name of K8s configmap to retrieve workflow controller configuration") + command.Flags().StringVar(&configMap, "configmap", common.ConfigMapName, "Name of K8s configmap to retrieve workflow controller configuration") command.Flags().StringVar(&executorImage, "executor-image", "", "Executor image to use (overrides value in configmap)") command.Flags().StringVar(&executorImagePullPolicy, "executor-image-pull-policy", "", "Executor imagePullPolicy to use (overrides value in configmap)") command.Flags().StringVar(&containerRuntimeExecutor, "container-runtime-executor", "", "Container runtime executor to use (overrides value in configmap)") diff --git a/config/config.go b/config/config.go index 7a714c8084c9..96f34ea584b6 100644 --- a/config/config.go +++ b/config/config.go @@ -3,6 +3,7 @@ package config import ( "fmt" "math" + "net/url" "time" apiv1 "k8s.io/api/core/v1" @@ -12,8 +13,6 @@ import ( wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" ) -var EmptyConfigFunc = func() interface{} { return &Config{} } - type ResourceRateLimit struct { Limit float64 `json:"limit"` Burst int `json:"burst"` @@ -34,17 +33,6 @@ type Config struct { // KubeConfig specifies a kube config file for the wait & init containers KubeConfig *KubeConfig `json:"kubeConfig,omitempty"` - // ContainerRuntimeExecutor specifies the container runtime interface to use, default is emissary - ContainerRuntimeExecutor string `json:"containerRuntimeExecutor,omitempty"` - - ContainerRuntimeExecutors ContainerRuntimeExecutors 
`json:"containerRuntimeExecutors,omitempty"` - - // KubeletPort is needed when using the kubelet containerRuntimeExecutor, default to 10250 - KubeletPort int `json:"kubeletPort,omitempty"` - - // KubeletInsecure disable the TLS verification of the kubelet containerRuntimeExecutor, default to false - KubeletInsecure bool `json:"kubeletInsecure,omitempty"` - // ArtifactRepository contains the default location of an artifact repository for container artifacts ArtifactRepository wfv1.ArtifactRepository `json:"artifactRepository,omitempty"` @@ -83,9 +71,6 @@ type Config struct { // Links to related apps. Links []*wfv1.Link `json:"links,omitempty"` - // Config customized Docker Sock path - DockerSockPath string `json:"dockerSockPath,omitempty"` - // WorkflowDefaults are values that will apply to all Workflows from this controller, unless overridden on the Workflow-level WorkflowDefaults *wfv1.Workflow `json:"workflowDefaults,omitempty"` @@ -116,17 +101,9 @@ type Config struct { // NavColor is an ui navigation bar background color NavColor string `json:"navColor,omitempty"` -} -func (c Config) GetContainerRuntimeExecutor(labels labels.Labels) (string, error) { - name, err := c.ContainerRuntimeExecutors.Select(labels) - if err != nil { - return "", err - } - if name != "" { - return name, nil - } - return c.ContainerRuntimeExecutor, nil + // SSO in settings for single-sign on + SSO SSOConfig `json:"sso,omitempty"` } func (c Config) GetExecutor() *apiv1.Container { @@ -154,6 +131,32 @@ func (c Config) GetPodGCDeleteDelayDuration() time.Duration { return c.PodGCDeleteDelayDuration.Duration } +func (c Config) ValidateProtocol(inputProtocol string, allowedProtocol []string) error { + for _, protocol := range allowedProtocol { + if inputProtocol == protocol { + return nil + } + } + return fmt.Errorf("protocol %s is not allowed", inputProtocol) +} + +func (c *Config) Sanitize(allowedProtocol []string) error { + links := c.Links + + for _, link := range links { + u, err := 
url.Parse(link.URL) + if err != nil { + return err + } + err = c.ValidateProtocol(u.Scheme, allowedProtocol) + if err != nil { + return err + } + link.URL = u.String() // reassembles the URL into a valid URL string + } + return nil +} + // PodSpecLogStrategy contains the configuration for logging the pod spec in controller log for debugging purpose type PodSpecLogStrategy struct { FailedPod bool `json:"failedPod,omitempty"` diff --git a/config/config_test.go b/config/config_test.go index b8198a90876a..09faa3b7ccb8 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -4,8 +4,8 @@ import ( "testing" "github.com/stretchr/testify/assert" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/apimachinery/pkg/labels" + + wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" ) func TestDatabaseConfig(t *testing.T) { @@ -13,30 +13,21 @@ func TestDatabaseConfig(t *testing.T) { assert.Equal(t, "my-host:1234", DatabaseConfig{Host: "my-host", Port: 1234}.GetHostname()) } -func TestContainerRuntimeExecutor(t *testing.T) { - t.Run("Default", func(t *testing.T) { - c := Config{ContainerRuntimeExecutor: "foo"} - executor, err := c.GetContainerRuntimeExecutor(labels.Set{}) - assert.NoError(t, err) - assert.Equal(t, "foo", executor) - }) - t.Run("Error", func(t *testing.T) { - c := Config{ContainerRuntimeExecutor: "foo", ContainerRuntimeExecutors: ContainerRuntimeExecutors{ - {Name: "bar", Selector: metav1.LabelSelector{ - MatchLabels: map[string]string{"!": "!"}, - }}, - }} - _, err := c.GetContainerRuntimeExecutor(labels.Set{}) - assert.Error(t, err) - }) - t.Run("NoError", func(t *testing.T) { - c := Config{ContainerRuntimeExecutor: "foo", ContainerRuntimeExecutors: ContainerRuntimeExecutors{ - {Name: "bar", Selector: metav1.LabelSelector{ - MatchLabels: map[string]string{"baz": "qux"}, - }}, - }} - executor, err := c.GetContainerRuntimeExecutor(labels.Set(map[string]string{"baz": "qux"})) - assert.NoError(t, err) - assert.Equal(t, "bar", 
executor) - }) +func TestSanitize(t *testing.T) { + tests := []struct { + c Config + err string + }{ + {Config{Links: []*wfv1.Link{{URL: "javascript:foo"}}}, "protocol javascript is not allowed"}, + {Config{Links: []*wfv1.Link{{URL: "javASCRipt: //foo"}}}, "protocol javascript is not allowed"}, + {Config{Links: []*wfv1.Link{{URL: "http://foo.bar/?foo=bar"}}}, ""}, + } + for _, tt := range tests { + err := tt.c.Sanitize([]string{"http", "https"}) + if tt.err != "" { + assert.Equal(t, err.Error(), tt.err) + } else { + assert.Nil(t, err) + } + } } diff --git a/config/controller.go b/config/controller.go index 01ff517589ad..66b916e55c73 100644 --- a/config/controller.go +++ b/config/controller.go @@ -5,60 +5,37 @@ import ( "fmt" "strings" - log "github.com/sirupsen/logrus" apiv1 "k8s.io/api/core/v1" - apierr "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/apimachinery/pkg/fields" - "k8s.io/apimachinery/pkg/runtime" - runtimeutil "k8s.io/apimachinery/pkg/util/runtime" - "k8s.io/apimachinery/pkg/watch" "k8s.io/client-go/kubernetes" - "k8s.io/client-go/tools/cache" "sigs.k8s.io/yaml" ) type Controller interface { - Run(stopCh <-chan struct{}, onChange func(config interface{}) error) - Get(context.Context) (interface{}, error) + Get(context.Context) (*Config, error) } type controller struct { namespace string // name of the config map - configMap string - kubeclientset kubernetes.Interface - emptyConfigFunc func() interface{} // must return a pointer, non-nil + configMap string + kubeclientset kubernetes.Interface } -func NewController(namespace, name string, kubeclientset kubernetes.Interface, emptyConfigFunc func() interface{}) Controller { - log.WithField("name", name).Info("config map") +func NewController(namespace, name string, kubeclientset kubernetes.Interface) Controller { return &controller{ - namespace: namespace, - configMap: name, - kubeclientset: kubeclientset, - emptyConfigFunc: emptyConfigFunc, + namespace: 
namespace, + configMap: name, + kubeclientset: kubeclientset, } } -func (cc *controller) updateConfig(cm *apiv1.ConfigMap, onChange func(config interface{}) error) error { - config, err := cc.parseConfigMap(cm) - if err != nil { - return err - } - return onChange(config) -} - -func (cc *controller) parseConfigMap(cm *apiv1.ConfigMap) (interface{}, error) { - config := cc.emptyConfigFunc() - if cm == nil { - return config, nil - } +func parseConfigMap(cm *apiv1.ConfigMap, config *Config) error { // The key in the configmap to retrieve workflow configuration from. // Content encoding is expected to be YAML. rawConfig, ok := cm.Data["config"] if ok && len(cm.Data) != 1 { - return config, fmt.Errorf("if you have an item in your config map named 'config', you must only have one item") + return fmt.Errorf("if you have an item in your config map named 'config', you must only have one item") } if !ok { for name, value := range cm.Data { @@ -70,64 +47,16 @@ func (cc *controller) parseConfigMap(cm *apiv1.ConfigMap) (interface{}, error) { } } } - err := yaml.Unmarshal([]byte(rawConfig), config) - return config, err + err := yaml.UnmarshalStrict([]byte(rawConfig), config) + return err } -func (cc *controller) Run(stopCh <-chan struct{}, onChange func(config interface{}) error) { - defer runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) - - restClient := cc.kubeclientset.CoreV1().RESTClient() - resource := "configmaps" - fieldSelector := fields.ParseSelectorOrDie(fmt.Sprintf("metadata.name=%s", cc.configMap)) - ctx := context.Background() - listFunc := func(options metav1.ListOptions) (runtime.Object, error) { - options.FieldSelector = fieldSelector.String() - req := restClient.Get(). - Namespace(cc.namespace). - Resource(resource). 
- VersionedParams(&options, metav1.ParameterCodec) - return req.Do(ctx).Get() - } - watchFunc := func(options metav1.ListOptions) (watch.Interface, error) { - options.Watch = true - options.FieldSelector = fieldSelector.String() - req := restClient.Get(). - Namespace(cc.namespace). - Resource(resource). - VersionedParams(&options, metav1.ParameterCodec) - return req.Watch(ctx) - } - source := &cache.ListWatch{ListFunc: listFunc, WatchFunc: watchFunc} - _, controller := cache.NewInformer( - source, - &apiv1.ConfigMap{}, - 0, - cache.ResourceEventHandlerFuncs{ - UpdateFunc: func(old, new interface{}) { - oldCM := old.(*apiv1.ConfigMap) - newCM := new.(*apiv1.ConfigMap) - if oldCM.ResourceVersion == newCM.ResourceVersion { - return - } - if newCm, ok := new.(*apiv1.ConfigMap); ok { - log.Infof("Detected ConfigMap update.") - err := cc.updateConfig(newCm, onChange) - if err != nil { - log.Errorf("Update of config failed due to: %v", err) - } - } - }, - }) - controller.Run(stopCh) - log.Info("Watching config map updates") -} - -func (cc *controller) Get(ctx context.Context) (interface{}, error) { +func (cc *controller) Get(ctx context.Context) (*Config, error) { + config := &Config{} cmClient := cc.kubeclientset.CoreV1().ConfigMaps(cc.namespace) cm, err := cmClient.Get(ctx, cc.configMap, metav1.GetOptions{}) - if err != nil && !apierr.IsNotFound(err) { - return cc.emptyConfigFunc(), err + if err != nil { + return nil, err } - return cc.parseConfigMap(cm) + return config, parseConfigMap(cm, config) } diff --git a/config/controller_test.go b/config/controller_test.go index 7170760392db..bbad50ec3306 100644 --- a/config/controller_test.go +++ b/config/controller_test.go @@ -1,28 +1,21 @@ package config import ( - "context" "testing" "github.com/stretchr/testify/assert" apiv1 "k8s.io/api/core/v1" - "k8s.io/client-go/kubernetes/fake" ) func Test_parseConfigMap(t *testing.T) { - cc := controller{emptyConfigFunc: EmptyConfigFunc} t.Run("Empty", func(t *testing.T) { - _, err := 
cc.parseConfigMap(&apiv1.ConfigMap{}) + c := &Config{} + err := parseConfigMap(&apiv1.ConfigMap{}, c) assert.NoError(t, err) }) - t.Run("Config", func(t *testing.T) { - c, err := cc.parseConfigMap(&apiv1.ConfigMap{Data: map[string]string{"config": "containerRuntimeExecutor: pns"}}) - if assert.NoError(t, err) { - assert.Equal(t, "pns", c.(*Config).ContainerRuntimeExecutor) - } - }) t.Run("Complex", func(t *testing.T) { - c, err := cc.parseConfigMap(&apiv1.ConfigMap{Data: map[string]string{"containerRuntimeExecutor": "pns", "artifactRepository": ` archiveLogs: true + c := &Config{} + err := parseConfigMap(&apiv1.ConfigMap{Data: map[string]string{"artifactRepository": ` archiveLogs: true s3: bucket: my-bucket endpoint: minio:9000 @@ -32,24 +25,14 @@ func Test_parseConfigMap(t *testing.T) { key: accesskey secretKeySecret: name: my-minio-cred - key: secretkey`}}) + key: secretkey`}}, c) if assert.NoError(t, err) { - assert.Equal(t, "pns", c.(*Config).ContainerRuntimeExecutor) - assert.NotEmpty(t, c.(*Config).ArtifactRepository) + assert.NotEmpty(t, c.ArtifactRepository) } }) - t.Run("IgnoreGarbage", func(t *testing.T) { - _, err := cc.parseConfigMap(&apiv1.ConfigMap{Data: map[string]string{"garbage": "garbage"}}) - assert.NoError(t, err) + t.Run("Garbage", func(t *testing.T) { + c := &Config{} + err := parseConfigMap(&apiv1.ConfigMap{Data: map[string]string{"garbage": "garbage"}}, c) + assert.Error(t, err) }) } - -func Test_controller_Get(t *testing.T) { - kube := fake.NewSimpleClientset() - c := controller{configMap: "my-config-map", kubeclientset: kube, emptyConfigFunc: EmptyConfigFunc} - ctx := context.Background() - config, err := c.Get(ctx) - if assert.NoError(t, err) { - assert.Empty(t, config) - } -} diff --git a/config/image.go b/config/image.go index c99d8d2021f1..be061068da4e 100644 --- a/config/image.go +++ b/config/image.go @@ -1,6 +1,6 @@ package config type Image struct { - Command []string `json:"command"` - Args []string `json:"args,omitempty"` + 
Entrypoint []string `json:"entrypoint,omitempty"` + Cmd []string `json:"cmd,omitempty"` } diff --git a/config/rbac.go b/config/rbac.go new file mode 100644 index 000000000000..8cdf3e8d3250 --- /dev/null +++ b/config/rbac.go @@ -0,0 +1,9 @@ +package config + +type RBACConfig struct { + Enabled bool `json:"enabled,omitempty"` +} + +func (c *RBACConfig) IsEnabled() bool { + return c != nil && c.Enabled +} diff --git a/config/sso.go b/config/sso.go new file mode 100644 index 000000000000..4c1a18254e2f --- /dev/null +++ b/config/sso.go @@ -0,0 +1,31 @@ +package config + +import ( + "time" + + apiv1 "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +type SSOConfig struct { + Issuer string `json:"issuer"` + IssuerAlias string `json:"issuerAlias,omitempty"` + ClientID apiv1.SecretKeySelector `json:"clientId"` + ClientSecret apiv1.SecretKeySelector `json:"clientSecret"` + RedirectURL string `json:"redirectUrl"` + RBAC *RBACConfig `json:"rbac,omitempty"` + // additional scopes (on top of "openid") + Scopes []string `json:"scopes,omitempty"` + SessionExpiry metav1.Duration `json:"sessionExpiry,omitempty"` + // customGroupClaimName will override the groups claim name + CustomGroupClaimName string `json:"customGroupClaimName,omitempty"` + UserInfoPath string `json:"userInfoPath,omitempty"` + InsecureSkipVerify bool `json:"insecureSkipVerify,omitempty"` +} + +func (c SSOConfig) GetSessionExpiry() time.Duration { + if c.SessionExpiry.Duration > 0 { + return c.SessionExpiry.Duration + } + return 10 * time.Hour +} diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index 0b7d8a7adc40..aec853da5e50 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -10,8 +10,8 @@ See [CNCF Code of Conduct](https://github.com/cncf/foundation/blob/master/code-o ## Contributor Meetings -A weekly opportunity for committers and maintainers of Workflows, Events, and Dataflow to discuss their current work and talk -about what’s next. Feel free to join us! 
For Contributor Meeting information, minutes and recordings +A weekly opportunity for committers and maintainers of Workflows, Events, and Dataflow to discuss their current work and +talk about what’s next. Feel free to join us! For Contributor Meeting information, minutes and recordings please [see here](https://bit.ly/argo-data-weekly). ## How To Contribute @@ -19,7 +19,9 @@ please [see here](https://bit.ly/argo-data-weekly). We're always looking for contributors. * Documentation - something missing or unclear? Please submit a pull request! -* Code contribution - investigate a [help wanted issue](https://github.com/argoproj/argo-workflows/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22+label%3A%22good+first+issue%22), or anything labelled with "good first issue"? +* Code contribution - investigate + a [good first issue](https://github.com/argoproj/argo-workflows/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) + , or anything not assigned. * Join the `#argo-contributors` channel on [our Slack](https://argoproj.github.io/community/join-slack). * Get a [mentor](mentoring.md) to help you get started. @@ -27,9 +29,34 @@ We're always looking for contributors. To run Argo Workflows locally for development: [running locally](running-locally.md). +### Dependencies + +Dependencies increase the risk of security issues and have on-going maintenance costs. + +The dependency must pass these test: + +* A strong use case. +* It has an acceptable license (e.g. MIT). +* It is actively maintained. +* It has no security issues. + +Example, should we add `fasttemplate` +, [view the Snyk report](https://snyk.io/advisor/golang/github.com/valyala/fasttemplate): + +| Test | Outcome | +|-----------------------------------------|-------------------------------------| +| A strong use case. | ❌ Fail. We can use `text/template`. | +| It has an acceptable license (e.g. MIT) | ✅ Pass. MIT license. | +| It is actively maintained. | ❌ Fail. Project is inactive. 
| +| It has no security issues. | ✅ Pass. No known security issues. | + +No, we should not add that dependency. + ### Test Policy -Changes without either unit or e2e tests are unlikely to be accepted. See [the pull request template](https://github.com/argoproj/argo-workflows/blob/master/.github/pull_request_template.md). +Changes without either unit or e2e tests are unlikely to be accepted. +See [the pull request template](https://github.com/argoproj/argo-workflows/blob/master/.github/pull_request_template.md) +. ### Contributor Workshop diff --git a/docs/README.md b/docs/README.md index 78292bd060ce..2291bfe1c8a0 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,37 +1,17 @@ -

Argo Logo

+# Argo Workflows -[![CI](https://github.com/argoproj/argo-workflows/workflows/CI/badge.svg)](https://github.com/argoproj/argo-workflows/actions?query=event%3Apush+branch%3Amaster) -[![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/3830/badge)](https://bestpractices.coreinfrastructure.org/projects/3830) -[![slack](https://img.shields.io/badge/slack-argoproj-brightgreen.svg?logo=slack)](https://argoproj.github.io/community/join-slack) - -## What is Argo Workflows? -Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. Argo Workflows is implemented as a Kubernetes CRD (Custom Resource Definition). +Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. Argo +Workflows is implemented as a Kubernetes CRD (Custom Resource Definition). * Define workflows where each step in the workflow is a container. -* Model multi-step workflows as a sequence of tasks or capture the dependencies between tasks using a directed acyclic graph (DAG). -* Easily run compute intensive jobs for machine learning or data processing in a fraction of the time using Argo Workflows on Kubernetes. +* Model multi-step workflows as a sequence of tasks or capture the dependencies between tasks using a directed acyclic + graph (DAG). +* Easily run compute intensive jobs for machine learning or data processing in a fraction of the time using Argo + Workflows on Kubernetes. * Run CI/CD pipelines natively on Kubernetes without configuring complex software development products. -Argo is a [Cloud Native Computing Foundation (CNCF)](https://cncf.io/) hosted project. - [![Argo Workflows in 5 minutes](https://img.youtube.com/vi/TZgLkCFQ2tk/0.jpg)](https://www.youtube.com/watch?v=TZgLkCFQ2tk) -## Why Argo Workflows? -* Designed from the ground up for containers without the overhead and limitations of legacy VM and server-based environments. 
-* Cloud agnostic and can run on any Kubernetes cluster. -* Easily orchestrate highly parallel jobs on Kubernetes. -* Argo Workflows puts a cloud-scale supercomputer at your fingertips! - -# Argo Documentation - -### Getting Started - -For set-up information and running your first Workflows, please see our [Getting Started](quick-start.md) guide. - -### Examples - -For detailed examples about what Argo can do, please see our [documentation by example](https://github.com/argoproj/argo-workflows/blob/master/examples/README.md) page. - -### Fields +## Getting Started -For a full list of all the fields available in for use in Argo, and a link to examples where each is used, please see [Argo Fields](fields.md). +For set-up information and running your first Workflows, please see our [getting started guide](quick-start.md). diff --git a/docs/access-token.md b/docs/access-token.md index 2aa35e679f12..63ecb459d823 100644 --- a/docs/access-token.md +++ b/docs/access-token.md @@ -1,37 +1,41 @@ # Access Token ## Overview -If you want to automate tasks with the Argo Server API or CLI, you will need an access token. -## Pre-requisites +If you want to automate tasks with the Argo Server API or CLI, you will need an access token. + +## Prerequisites + Firstly, create a role with minimal permissions. This example role for jenkins only permission to update and list workflows: -```sh +```bash kubectl create role jenkins --verb=list,update --resource=workflows.argoproj.io ``` Create a service account for your service: -```sh +```bash kubectl create sa jenkins ``` ### Tip for Tokens Creation -Create a unique service account for each client: + +Create a unique service account for each client: - (a) you'll be able to correctly secure your workflows -- (b) [revoke the token](#token-revocation) without impacting other clients. +- (b) [revoke the token](#token-revocation) without impacting other clients. 
Bind the service account to the role (in this case in the `argo` namespace): -```sh +```bash kubectl create rolebinding jenkins --role=jenkins --serviceaccount=argo:jenkins ``` ## Token Creation + You now need to get a token: -```sh +```bash SECRET=$(kubectl get sa jenkins -o=jsonpath='{.secrets[0].name}') ARGO_TOKEN="Bearer $(kubectl get secret $SECRET -o=jsonpath='{.data.token}' | base64 --decode)" echo $ARGO_TOKEN @@ -39,18 +43,19 @@ Bearer ZXlKaGJHY2lPaUpTVXpJMU5pSXNJbXRwWkNJNkltS... ``` ## Token Usage & Test + To use that token with the CLI you need to set `ARGO_SERVER` (see `argo --help`). Use that token in your API requests, e.g. to list workflows: -```sh +```bash curl https://localhost:2746/api/v1/workflows/argo -H "Authorization: $ARGO_TOKEN" # 200 OK ``` You should check you cannot do things you're not allowed! -```sh +```bash curl https://localhost:2746/api/v1/workflow-templates/argo -H "Authorization: $ARGO_TOKEN" # 403 error ``` @@ -59,29 +64,33 @@ curl https://localhost:2746/api/v1/workflow-templates/argo -H "Authorization: $A ### Set additional params to initialize Argo settings - ARGO_SERVER="${{HOST}}:443" - KUBECONFIG=/dev/null - ARGO_NAMESPACE=sandbox +```bash +ARGO_SERVER="${{HOST}}:443" +KUBECONFIG=/dev/null +ARGO_NAMESPACE=sandbox +``` ### Start container with settings above + > Note: Example for getting list of templates from an existing namespace - docker run --rm -it \ - -e ARGO_SERVER=$ARGO_SERVER \ - -e ARGO_TOKEN=$ARGO_TOKEN \ - -e ARGO_HTTP=false \ - -e ARGO_HTTP1=true \ - -e KUBECONFIG=/dev/null \ - -e ARGO_NAMESPACE=$ARGO_NAMESPACE \ - argoproj/argocli:latest template list -v -e -k +```bash +docker run --rm -it \ + -e ARGO_SERVER=$ARGO_SERVER \ + -e ARGO_TOKEN=$ARGO_TOKEN \ + -e ARGO_HTTP=false \ + -e ARGO_HTTP1=true \ + -e KUBECONFIG=/dev/null \ + -e ARGO_NAMESPACE=$ARGO_NAMESPACE \ + argoproj/argocli:latest template list -v -e -k +``` ## Token Revocation Token compromised? 
-```sh +```bash kubectl delete secret $SECRET ``` A new one will be created. - diff --git a/docs/architecture.md b/docs/architecture.md index 78c6eb46c1ce..43ca2dc9ae15 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -12,3 +12,12 @@ ![diagram](assets/architecture.jpeg) +## Various configurations for Argo UI and Argo Server + +The top diagram below shows what happens if you run "make start UI=true" locally (recommended if you need the UI during local development). This runs a React application (`Webpack` HTTP server) locally which serves the `index.html` and typescript files from port 8080. From the typescript code there are calls made to the back end API (Argo Server) at port 2746. The `Webpack` HTTP server is configured for hot reload, meaning the UI will update automatically based on local code changes. + +The second diagram is an alternative approach for rare occasions that the React files are broken and you're doing local development. In this case, everything is served from the Argo Server at port 2746. + +The third diagram shows how things are configured for a Kubernetes environment. It is similar to the second diagram in that the Argo Server hosts everything for the UI. + +![diagram](assets/argo-server-ui-configurations.png) diff --git a/docs/argo-server-auth-mode.md b/docs/argo-server-auth-mode.md index 4ee302c2cb32..e748f53adc3f 100644 --- a/docs/argo-server-auth-mode.md +++ b/docs/argo-server-auth-mode.md @@ -2,13 +2,14 @@ You can choose which kube config the Argo Server uses: -* "server" - in hosted mode, use the kube config of service account, in local mode, use your local kube config. -* "client" - requires clients to provide their Kubernetes bearer token and use that. -* ["sso"](./argo-server-sso.md) - since v2.9, use single sign-on, this will use the same service account as per "server" for RBAC. We expect to change this in the future so that the OAuth claims are mapped to service accounts. 
+* `server` - in hosted mode, use the kube config of service account, in local mode, use your local kube config. +* `client` - requires clients to provide their Kubernetes bearer token and use that. +* [`sso`](./argo-server-sso.md) - since v2.9, use single sign-on, this will use the same service account as per "server" for RBAC. We expect to change this in the future so that the OAuth claims are mapped to service accounts. The server used to start with auth mode of "server" by default, but since v3.0 it defaults to the "client". To change the server auth mode specify the list as multiple auth-mode flags: -``` + +```bash argo server --auth-mode sso --auth-mode ... ``` diff --git a/docs/argo-server-sso-argocd.md b/docs/argo-server-sso-argocd.md index 0983b7293307..0abcaeb70e1e 100644 --- a/docs/argo-server-sso-argocd.md +++ b/docs/argo-server-sso-argocd.md @@ -1,9 +1,10 @@ -# Use ArgoCD Dex for authentication +# Use Argo CD Dex for authentication It is possible to have the Argo Workflows Server use the Argo CD Dex instance for authentication, for instance if you use Okta with SAML which cannot integrate with Argo Workflows directly. In order to make this happen, you will need the following: -- You must be using at least Dex [v2.23.0](https://github.com/dexidp/dex/releases/tag/v2.23.0), because that's when `staticClients[].secretEnv` was added. That means ArgoCD 1.7.12 and above. -- A secret containing two keys, `client-id` and `client-secret` to be used by both Dex and Argo Workflows Server. `client-id` is `argo-workflows-sso` in this example, `client-secret` can be any random string. If ArgoCD and ArgoWorkflows are installed in different namespaces the secret must be present in both of them. Example: +- You must be using at least Dex [v2.23.0](https://github.com/dexidp/dex/releases/tag/v2.23.0), because that's when `staticClients[].secretEnv` was added. That means Argo CD 1.7.12 and above. 
+- A secret containing two keys, `client-id` and `client-secret` to be used by both Dex and Argo Workflows Server. `client-id` is `argo-workflows-sso` in this example, `client-secret` can be any random string. If Argo CD and Argo Workflows are installed in different namespaces the secret must be present in both of them. Example: + ```yaml apiVersion: v1 kind: Secret @@ -15,13 +16,15 @@ It is possible to have the Argo Workflows Server use the Argo CD Dex instance fo # client-secret is 'MY-SECRET-STRING-CAN-BE-UUID' client-secret: TVktU0VDUkVULVNUUklORy1DQU4tQkUtVVVJRA== ``` + - `--auth-mode=sso` server argument added - A Dex `staticClients` configured for `argo-workflows-sso` - The `sso` configuration filled out in Argo Workflows Server to match -## Example manifests for authenticating against ArgoCD's Dex (Kustomize) +## Example manifests for authenticating against Argo CD's Dex (Kustomize) + +In Argo CD, add an environment variable to Dex deployment and configuration: -In ArgoCD, add an environment variable to Dex deployment and configuration: ```yaml --- apiVersion: apps/v1 @@ -49,7 +52,7 @@ data: # Dex settings, but instead it will replace the entire configuration with the settings below, # so add these to the existing config instead of setting them in a separate file dex.config: | - # Setting staticClients allows ArgoWorkflows to use ArgoCD's Dex installation for authentication + # Setting staticClients allows Argo Workflows to use Argo CD's Dex installation for authentication staticClients: - id: argo-workflows-sso name: Argo Workflow @@ -59,6 +62,7 @@ data: ``` In Argo Workflows add `--auth-mode=sso` argument to argo-server deployment. 
+ ```yaml --- apiVersion: apps/v1 @@ -101,9 +105,10 @@ data: redirectUrl: https://argo-workflows.mydomain.com/oauth2/callback ``` -## Example Helm chart configuration for authenticating against ArgoCD's Dex +## Example Helm chart configuration for authenticating against Argo CD's Dex `argo-cd/values.yaml`: + ```yaml dex: image: @@ -125,7 +130,8 @@ data: secretEnv: ARGO_WORKFLOWS_SSO_CLIENT_SECRET ``` -`argo/values.yaml`: +`argo-workflows/values.yaml`: + ```yaml server: extraArgs: diff --git a/docs/argo-server-sso.md b/docs/argo-server-sso.md index 9aea26c3eb1a..8d47a1afbce6 100644 --- a/docs/argo-server-sso.md +++ b/docs/argo-server-sso.md @@ -1,17 +1,16 @@ # Argo Server SSO -![GA](assets/ga.svg) - > v2.9 and after -It is possible to use [Dex](https://github.com/dexidp/dex) for authentication. [This document](argo-server-sso-argocd.md) describes how to set up ArgoWorkflows and ArgoCD so that ArgoWorkflows uses ArgoCD's Dex server for authentication. +It is possible to use [Dex](https://github.com/dexidp/dex) for authentication. [This document](argo-server-sso-argocd.md) describes how to set up Argo Workflows and Argo CD so that Argo Workflows uses Argo CD's Dex server for authentication. -## To start Argo Server with SSO. +## To start Argo Server with SSO -Firstly, configure the settings [workflow-controller-configmap.yaml](workflow-controller-configmap.yaml) with the correct OAuth 2 values. If working towards an oidc configuration the ArgoCD project has [guides](https://argoproj.github.io/argo-cd/operator-manual/user-management/#existing-oidc-provider) on its similar (though different) process for setting up oidc providers. It also includes examples for specific providers. The main difference is that the ArgoCD docs mention that their callback address endpoint is `/auth/callback`. 
For ArgoWorkflows, the default format is `/oauth2/callback` as shown in [this comment](https://github.com/argoproj/argo-workflows/blob/93c11a24ff06049c2197149acd787f702e5c1f9b/docs/workflow-controller-configmap.yaml#L329) in the default values.yaml file in the helm chart. +Firstly, configure the settings [workflow-controller-configmap.yaml](workflow-controller-configmap.yaml) with the correct OAuth 2 values. If working towards an OIDC configuration the Argo CD project has [guides](https://argo-cd.readthedocs.io/en/stable/operator-manual/user-management/) on its similar (though different) process for setting up OIDC providers. It also includes examples for specific providers. The main difference is that the Argo CD docs mention that their callback address endpoint is `/auth/callback`. For Argo Workflows, the default format is `/oauth2/callback` as shown in [this comment](https://github.com/argoproj/argo-workflows/blob/93c11a24ff06049c2197149acd787f702e5c1f9b/docs/workflow-controller-configmap.yaml#L329) in the default `values.yaml` file in the helm chart. -Next, create the k8s secrets for holding the OAuth2 `client-id` and `client-secret`. You may refer to the kubernetes documentation on [Managing secrets](https://kubernetes.io/docs/tasks/configmap-secret/). For example by using kubectl with literals: -``` +Next, create the Kubernetes secrets for holding the OAuth2 `client-id` and `client-secret`. You may refer to the kubernetes documentation on [Managing secrets](https://kubernetes.io/docs/tasks/configmap-secret/). For example by using `kubectl` with literals: + +```bash kubectl create secret -n argo generic client-id-secret \ --from-literal=client-id-key=foo @@ -21,7 +20,7 @@ kubectl create secret -n argo generic client-secret-secret \ Then, start the Argo Server using the SSO [auth mode](argo-server-auth-mode.md): -``` +```bash argo server --auth-mode sso --auth-mode ... ``` @@ -31,11 +30,11 @@ argo server --auth-mode sso --auth-mode ... 
As of v2.12 we issue a JWE token for users rather than give them the ID token from your OAuth2 provider. This token is opaque and has a longer expiry time (10h by default). -The token encryption key is automatically generated by the Argo Server and stored in a Kubernetes secret name "sso". +The token encryption key is automatically generated by the Argo Server and stored in a Kubernetes secret name `sso`. You can revoke all tokens by deleting the encryption key and restarting the Argo Server (so it generates a new key). -``` +```bash kubectl delete secret sso ``` @@ -44,7 +43,6 @@ kubectl delete secret sso All users will need to log in again. Sorry. - ## SSO RBAC > v2.12 and after @@ -55,7 +53,7 @@ To allow service accounts to manage resources in other namespaces create a role RBAC config is installation-level, so any changes will need to be made by the team that installed Argo. Many complex rules will be burdensome on that team. -Firstly, enable the `rbac:` setting in [workflow-controller-configmap.yaml](workflow-controller-configmap.yaml). You almost certainly want to be able configure RBAC using groups, so add `scopes:` to the SSO settings: +Firstly, enable the `rbac:` setting in [workflow-controller-configmap.yaml](workflow-controller-configmap.yaml). You almost certainly want to be able to configure RBAC using groups, so add `scopes:` to the SSO settings: ```yaml sso: @@ -96,35 +94,33 @@ metadata: workflows.argoproj.io/rbac-rule-precedence: "1" ``` - If no rule matches, we deny the user access. -!!! Tip - You'll probably want to configure a default account to use if no other rule matches, e.g. a read-only account, you can do this as follows: +Tip: You'll probably want to configure a default account to use if no other rule matches, e.g. 
a read-only account, you can do this as follows: - ```yaml - metadata: - name: read-only - annotations: - workflows.argoproj.io/rbac-rule: "true" - workflows.argoproj.io/rbac-rule-precedence: "0" - ``` +```yaml +metadata: + name: read-only + annotations: + workflows.argoproj.io/rbac-rule: "true" + workflows.argoproj.io/rbac-rule-precedence: "0" +``` - The precedence must be the lowest of all your service accounts. +The precedence must be the lowest of all your service accounts. ## SSO RBAC Namespace Delegation > v3.3 and after You can optionally configure RBAC SSO per namespace. -Typically, on organization has a K8s cluster and a central team manages the cluster who is the owner of the cluster. Along with this, there are multiple namespaces which are owned by individual team. This feature would help namespace owners to define RBAC for their own namespace. +Typically, an organization has a Kubernetes cluster and a central team (the owner of the cluster) manages the cluster. Along with this, there are multiple namespaces which are owned by individual teams. This feature would help namespace owners to define RBAC for their own namespace. The feature is currently in beta. To enable the feature, set env variable `SSO_DELEGATE_RBAC_TO_NAMESPACE=true` in your argo-server deployment. -#### Recommended usage +### Recommended usage -Configure a default account in the installation namespace which would allow all users of your organization. We will use this service account to allow a user to login to the cluster. You could optionally add workflow read-only role and rolebinding if you wish to. +Configure a default account in the installation namespace which would allow all users of your organization. We will use this service account to allow a user to login to the cluster. You could optionally add workflow read-only role and role-binding if you wish to. 
```yaml apiVersion: v1 @@ -136,8 +132,8 @@ metadata: workflows.argoproj.io/rbac-rule-precedence: "0" ``` -Now, for the the namespace that you own, configure a service account which would allow members of your team to perform operations in your namespace. -Make sure that the precedence of the namespace service account is higher than the precedence of the login service account. Create approprite role that you want to grant to this serviceaccount and bind it with a role-binding. +Now, for the namespace that you own, configure a service account which would allow members of your team to perform operations in your namespace. +Make sure that the precedence of the namespace service account is higher than the precedence of the login service account. Create appropriate role that you want to grant to this service account and bind it with a role-binding. ```yaml apiVersion: v1 @@ -150,26 +146,27 @@ metadata: workflows.argoproj.io/rbac-rule-precedence: "1" ``` -Using this, whenever a user is logged in via SSO and makes a request in 'my-namespace', and the `rbac-rule`matches, we will use this service account to allow the user to perform that operation in the namespace. If no serviceaccount matches in the namespace, the first serviceaccount(`user-default-login`) and its associated role will be used to perform the operation in the namespace. +Using this, whenever a user is logged in via SSO and makes a request in 'my-namespace', and the `rbac-rule` matches, we will use this service account to allow the user to perform that operation in the namespace. If no service account matches in the namespace, the first service account (`user-default-login`) and its associated role will be used to perform the operation in the namespace. ## SSO Login Time > v2.12 and after -By default, your SSO session will expire after 10 hours. You can change this by adding a sessionExpiry value to your [workflow-controller-configmap.yaml](workflow-controller-configmap.yaml) under the SSO heading. 
+By default, your SSO session will expire after 10 hours. You can change this by adding a `sessionExpiry` to your [workflow-controller-configmap.yaml](workflow-controller-configmap.yaml) under the SSO heading. ```yaml sso: # Expiry defines how long your login is valid for in hours. (optional) sessionExpiry: 240h ``` + ## Custom claims > v3.1.4 and after If your OIDC provider provides groups information with a claim name other than `groups`, you could configure config-map to specify custom claim name for groups. Argo now arbitrary custom claims and any claim can be used for `expr eval`. However, since group information is displayed in UI, it still needs to be an array of strings with group names as elements. -customClaim in this case will be mapped to `groups` key and we can use the same key `groups` for evaluating our expressions +The `customClaim` in this case will be mapped to `groups` key and we can use the same key `groups` for evaluating our expressions ```yaml sso: @@ -177,15 +174,16 @@ sso: customGroupClaimName: argo_groups ``` -If your OIDC provider provides groups information only using the userInfo endpoint (e.g. OKta), you could configure `userInfoPath` to specify the user info endpoint that contains the groups claim. +If your OIDC provider provides groups information only using the user-info endpoint (e.g. Okta), you could configure `userInfoPath` to specify the user info endpoint that contains the groups claim. + ```yaml sso: userInfoPath: /oauth2/v1/userinfo ``` -#### Example expr +### Example Expression -```shell +```bash # assuming customClaimGroupName: argo_groups workflows.argoproj.io/rbac-rule: "'argo_admins' in groups" ``` diff --git a/docs/argo-server.md b/docs/argo-server.md index 84adcc79998c..eb53d1b294e0 100644 --- a/docs/argo-server.md +++ b/docs/argo-server.md @@ -1,7 +1,5 @@ # Argo Server -![GA](assets/ga.svg) - > v2.5 and after !!! 
Warning "HTTP vs HTTPS" @@ -31,12 +29,11 @@ Use this mode if: To run locally: -``` +```bash argo server ``` -This will start a server on port 2746 which you can view at [https://localhost:2746](https://localhost:2746). - +This will start a server on port 2746 which you [can view](https://localhost:2746). ## Options @@ -48,10 +45,10 @@ See [auth](argo-server-auth-mode.md). See [managed namespace](managed-namespace.md). -### Base href +### Base HREF -If the server is running behind reverse proxy with a subpath different from `/` (for example, -`/argo`), you can set an alternative subpath with the `--base-href` flag or the `BASE_HREF` +If the server is running behind reverse proxy with a sub-path different from `/` (for example, +`/argo`), you can set an alternative sub-path with the `--base-href` flag or the `BASE_HREF` environment variable. You probably now should [read how to set-up an ingress](#ingress) @@ -60,9 +57,9 @@ You probably now should [read how to set-up an ingress](#ingress) See [TLS](tls.md). -### SSO +### SSO -See [SSO](argo-server-sso.md). See [here](argo-server-sso-argocd.md) about sharing ArgoCD's Dex with ArgoWorkflows. +See [SSO](argo-server-sso.md). See [here](argo-server-sso-argocd.md) about sharing Argo CD's Dex with Argo Workflows. ## Access the Argo Workflows UI @@ -71,27 +68,27 @@ following: ### `kubectl port-forward` -```sh +```bash kubectl -n argo port-forward svc/argo-server 2746:2746 ``` -Then visit: https://127.0.0.1:2746 - +Then visit: ### Expose a `LoadBalancer` Update the service to be of type `LoadBalancer`. 
-```sh +```bash kubectl patch svc argo-server -n argo -p '{"spec": {"type": "LoadBalancer"}}' ``` Then wait for the external IP to be made available: -```sh +```bash kubectl get svc argo-server -n argo ``` -```sh + +```bash NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE argo-server LoadBalancer 10.43.43.130 172.18.0.2 2746:30008/TCP 18h ``` @@ -102,7 +99,6 @@ You can get ingress working as follows: Add `BASE_HREF` as environment variable to `deployment/argo-server`. Do not forget to add a trailing '/' character. - ```yaml --- apiVersion: apps/v1 @@ -133,7 +129,7 @@ Create a ingress, with the annotation `ingress.kubernetes.io/rewrite-target: /`: >If TLS is enabled (default in v3.0 and after), the ingress controller must be told >that the backend uses HTTPS. The method depends on the ingress controller, e.g. ->Traefik expects an `ingress.kubernetes.io/protocol` annotation, while ingress-nginx +>Traefik expects an `ingress.kubernetes.io/protocol` annotation, while `ingress-nginx` >uses `nginx.ingress.kubernetes.io/backend-protocol` ```yaml @@ -157,15 +153,25 @@ spec: [Learn more](https://github.com/argoproj/argo-workflows/issues/3080) - ## Security Users should consider the following in their set-up of the Argo Server: ### API Authentication Rate Limiting -Argo Server does not perform authenticatinon directly. It delegates this to either the Kubernetes API Server (when `--auth-mode=client`) and the OAuth provider (when `--auth-mode=sso`). In each case, it is recommended that the delegate implements any authentication rate limiting you need. +Argo Server does not perform authentication directly. It delegates this to either the Kubernetes API Server (when `--auth-mode=client`) and the OAuth provider (when `--auth-mode=sso`). In each case, it is recommended that the delegate implements any authentication rate limiting you need. ### IP Address Logging Argo Server does not log the IP addresses of API requests. 
We recommend you put the Argo Server behind a load balancer, and that load balancer is configured to log the IP addresses of requests that return authentication or authorization errors. + +### Rate Limiting + +> v3.4 and after + +Argo Server by default rate limits to 1000 per IP per minute, you can configure it through `--api-rate-limit`. You can access additional information through the following headers. + +* `X-Rate-Limit-Limit` - the rate limit ceiling that is applicable for the current request. +* `X-Rate-Limit-Remaining` - the number of requests left for the current rate-limit window. +* `X-Rate-Limit-Reset` - the time at which the rate limit resets, specified in UTC time. +* `Retry-After` - indicate when a client should retry requests (when the rate limit expires), in UTC time. diff --git a/docs/artifact-repository-ref.md b/docs/artifact-repository-ref.md index a06c834d56fe..02c5aa97f9e0 100644 --- a/docs/artifact-repository-ref.md +++ b/docs/artifact-repository-ref.md @@ -1,7 +1,5 @@ # Artifact Repository Ref -![GA](assets/ga.svg) - > v2.9 and after You can reduce duplication in your templates by configuring repositories that can be accessed by any workflow. This can also remove sensitive information from your templates. @@ -46,4 +44,4 @@ spec: This feature gives maximum benefit when used with [key-only artifacts](key-only-artifacts.md). -Reference: [fields.md#artifactrepositoryref](fields.md#artifactrepositoryref). \ No newline at end of file +[Reference](fields.md#artifactrepositoryref). diff --git a/docs/artifact-visualization.md b/docs/artifact-visualization.md new file mode 100644 index 000000000000..a0596485284b --- /dev/null +++ b/docs/artifact-visualization.md @@ -0,0 +1,97 @@ +# Artifact Visualization + +> since v3.4 + +Artifacts can be viewed in the UI. + +Use cases: + +* Comparing ML pipeline runs from generated charts. +* Visualizing end results of ML pipeline runs. +* Debugging workflows where visual artifacts are the most helpful. 
+ +[![Demo](https://img.youtube.com/vi/whoRfYY9Fhk/0.jpg)](https://youtu.be/whoRfYY9Fhk) + +* Artifacts appear as elements in the workflow DAG that you can click on. +* When you click on the artifact, a panel appears. +* The first time this appears explanatory text is shown to help you understand if you might need to change your + workflows to use this feature. +* Known file types such as images, text or HTML are shown in an inline-frame (`iframe`). +* Artifacts are sandboxed using a Content-Security-Policy that prevents JavaScript execution. +* JSON is shown with syntax highlighting. + +To start, take a look at +the [example](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) +. + +![Graph Report](assets/graph-report.png) +![Test Report](assets/test-report.png) + +## Artifact Types + +An artifact may be a `.tgz`, file or directory. + +### `.tgz` + +Viewing of `.tgz` is not supported in the UI. By default artifacts are compressed as a `.tgz`. Only artifacts that were +not compressed can be viewed. + +To prevent compression, set `archive` to `none`: + +```yaml +- name: artifact + # ... + archive: + none: { } +``` + +### File + +Files may be shown in the UI. To determine if a file can be shown, the UI checks if the artifact's file extension is +supported. The extension is found in the artifact's key. + +To view a file, add the extension to the key: + +```yaml +- name: single-file + s3: + key: visualization.png +``` + +### Directory + +Directories are shown in the UI. The UI considers any key with a trailing-slash to be a directory. + +To view a directory, add a trailing-slash: + +```yaml +- name: reports + s3: + key: reports/ +``` + +If the directory contains `index.html`, then that will be shown, otherwise a directory listing is displayed. + +⚠️ HTML files may contain CSS and images served from the same origin. Scripts are not allowed. Nothing may be remotely +loaded. 
+ +## Security + +### Content Security Policy + +We assume that artifacts are not trusted, so by default, artifacts are served with a `Content-Security-Policy` that +disables JavaScript and remote files. + +This is similar to what happens when you include third-party scripts, such as analytic tracking, in your website. +However, those tracking codes are normally served from a different domain to your main website. Artifacts are served +from the same origin, so normal browser controls are not secure enough. + +### Sub-Path Access + +Previously, users could access the artifacts of any workflows they could access. To allow HTML files to link to other files +within their tree, you can now access any sub-paths of the artifact's key. + +Example: + +The artifact produces a folder in an S3 bucket named `my-bucket`, with a key `report/`. You can also access anything +matching `report/*`. diff --git a/docs/assets/alpha.svg b/docs/assets/alpha.svg deleted file mode 100644 index 471179bef482..000000000000 --- a/docs/assets/alpha.svg +++ /dev/null @@ -1 +0,0 @@ - ALPHAALPHA \ No newline at end of file diff --git a/docs/assets/argo-server-ui-configurations.png b/docs/assets/argo-server-ui-configurations.png new file mode 100644 index 000000000000..a6d84f0ac3e8 Binary files /dev/null and b/docs/assets/argo-server-ui-configurations.png differ diff --git a/docs/assets/artifact-gc-option-2-flow.jpg b/docs/assets/artifact-gc-option-2-flow.jpg new file mode 100644 index 000000000000..7954fc90210b Binary files /dev/null and b/docs/assets/artifact-gc-option-2-flow.jpg differ diff --git a/docs/assets/artifact-gc-proposal.pptx b/docs/assets/artifact-gc-proposal.pptx new file mode 100644 index 000000000000..a78102316960 Binary files /dev/null and b/docs/assets/artifact-gc-proposal.pptx differ diff --git a/docs/assets/beta.svg b/docs/assets/beta.svg deleted file mode 100644 index 29b44088563f..000000000000 --- a/docs/assets/beta.svg +++ /dev/null @@ -1 +0,0 @@ - BETABETA \ No newline at end 
of file diff --git a/docs/assets/ecosystem.png b/docs/assets/ecosystem.png deleted file mode 100644 index 721373b24e9a..000000000000 Binary files a/docs/assets/ecosystem.png and /dev/null differ diff --git a/docs/assets/ga.svg b/docs/assets/ga.svg deleted file mode 100644 index 3424357bace2..000000000000 --- a/docs/assets/ga.svg +++ /dev/null @@ -1 +0,0 @@ - GAGA \ No newline at end of file diff --git a/docs/assets/graph-report.png b/docs/assets/graph-report.png new file mode 100644 index 000000000000..be845902da1d Binary files /dev/null and b/docs/assets/graph-report.png differ diff --git a/docs/assets/katacoda.png b/docs/assets/katacoda.png deleted file mode 100644 index 0b053ea3126a..000000000000 Binary files a/docs/assets/katacoda.png and /dev/null differ diff --git a/docs/assets/make-start-UI-true.png b/docs/assets/make-start-UI-true.png new file mode 100644 index 000000000000..1b0a0a04a5fb Binary files /dev/null and b/docs/assets/make-start-UI-true.png differ diff --git a/docs/assets/test-report.png b/docs/assets/test-report.png new file mode 100644 index 000000000000..ca204ec9f6b4 Binary files /dev/null and b/docs/assets/test-report.png differ diff --git a/docs/async-pattern.md b/docs/async-pattern.md index 70b98716a16a..5f463902ec12 100644 --- a/docs/async-pattern.md +++ b/docs/async-pattern.md @@ -2,20 +2,20 @@ ## Introduction -If triggering an external job (eg an Amazon EMR job) from Argo that does not run to completion in a container, there are two options: +If triggering an external job (e.g. an Amazon EMR job) from Argo that does not run to completion in a container, there are two options: - create a container that polls the external job completion status -- combine a trigger step that starts the job with a `Suspend` step that is unsuspended by an API call to Argo when the external job is complete. +- combine a trigger step that starts the job with a `Suspend` step that is resumed by an API call to Argo when the external job is complete. 
This document describes the second option in more detail. ## The pattern -The pattern involves two steps - the first step is a short-running step that triggers a long-running job outside Argo (eg an HTTP submission), and the second step is a `Suspend` step that suspends workflow execution and is ultimately either resumed or stopped (ie failed) via a call to the Argo API when the job outside Argo succeeds or fails. +The pattern involves two steps - the first step is a short-running step that triggers a long-running job outside Argo (e.g. an HTTP submission), and the second step is a `Suspend` step that suspends workflow execution and is ultimately either resumed or stopped (i.e. failed) via a call to the Argo API when the job outside Argo succeeds or fails. When implemented as a `WorkflowTemplate` it can look something like this: -``` +```yaml apiVersion: argoproj.io/v1alpha1 kind: WorkflowTemplate metadata: @@ -56,13 +56,13 @@ spec: suspend: {} ``` -In this case the ```job-cmd``` parameter can be a command that makes an http call via curl to an endpoint that returns a job uuid. More sophisticated submission and parsing of submission output could be done with something like a Python script step. +In this case the ```job-cmd``` parameter can be a command that makes an HTTP call via curl to an endpoint that returns a job UUID. More sophisticated submission and parsing of submission output could be done with something like a Python script step. On job completion the external job would need to call either resume if successful: You may need an [access token](access-token.md). 
-``` +```bash curl --request PUT \ --url https://localhost:2746/api/v1/workflows///resume --header 'content-type: application/json' \ @@ -76,7 +76,7 @@ curl --request PUT \ or stop if unsuccessful: -``` +```bash curl --request PUT \ --url https://localhost:2746/api/v1/workflows///stop --header 'content-type: application/json' \ @@ -93,15 +93,8 @@ curl --request PUT \ Using `argo retry` on failed jobs that follow this pattern will cause Argo to re-attempt the Suspend step without re-triggering the job. -Instead you need to use the `--restart-successful` option, eg if using the template from above: +Instead you need to use the `--restart-successful` option, e.g. if using the template from above: -``` +```bash argo retry --restart-successful --node-field-selector templateRef.template=run-external-job,phase=Failed ``` - -See also: - -* [access token](access-token.md) -* [resuming a workflow via automation](resuming-workflow-via-automation.md) -* [submitting a workflow via automation](submit-workflow-via-automation.md) -* [one workflow submitting another](workflow-submitting-workflow.md) diff --git a/docs/cli.md b/docs/cli.md deleted file mode 100644 index 8927bfe2e832..000000000000 --- a/docs/cli.md +++ /dev/null @@ -1,32 +0,0 @@ -# CLI - -The CLI allows to (amongst other things) submit, watch, and list workflows, e.g.: - -```sh -argo submit my-wf.yaml -argo list -``` - -## Reference - -You can find [detailed reference here](cli/argo.md) - -## Help - -Most help topics are provided by built-in help: - -``` -argo --help -``` - -## Argo Server - -You'll need to configure your commands to use the Argo Server if you have [offloaded node status](offloading-large-workflows.md) or are trying to access your [workflow archive](workflow-archive.md). - -To do so, set the `ARGO_SERVER` environment variable, e.g.: - -``` -export ARGO_SERVER=localhost:2746 -``` - -See [TLS](tls.md). 
\ No newline at end of file diff --git a/docs/cli/argo.md b/docs/cli/argo.md index 0e9b9acab9a5..890da67d4f44 100644 --- a/docs/cli/argo.md +++ b/docs/cli/argo.md @@ -90,6 +90,7 @@ argo [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server @@ -106,6 +107,7 @@ argo [flags] * [argo auth](argo_auth.md) - manage authentication settings * [argo cluster-template](argo_cluster-template.md) - manipulate cluster workflow templates * [argo completion](argo_completion.md) - output shell completion code for the specified shell (bash or zsh) +* [argo cp](argo_cp.md) - copy artifacts from workflow * [argo cron](argo_cron.md) - manage cron workflows * [argo delete](argo_delete.md) - delete workflows * [argo executor-plugin](argo_executor-plugin.md) - manage executor plugins diff --git a/docs/cli/argo_archive.md b/docs/cli/argo_archive.md index f935546ef05f..2375b7ff3a3f 100644 --- a/docs/cli/argo_archive.md +++ b/docs/cli/argo_archive.md @@ -35,6 +35,7 @@ argo archive [flags] --loglevel string Set the logging level. 
One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server @@ -54,4 +55,5 @@ argo archive [flags] * [argo archive list-label-keys](argo_archive_list-label-keys.md) - list workflows label keys in the archive * [argo archive list-label-values](argo_archive_list-label-values.md) - get workflow label values in the archive * [argo archive resubmit](argo_archive_resubmit.md) - resubmit one or more workflows +* [argo archive retry](argo_archive_retry.md) - retry zero or more workflows diff --git a/docs/cli/argo_archive_delete.md b/docs/cli/argo_archive_delete.md index bc2783932e5a..8381976b866a 100644 --- a/docs/cli/argo_archive_delete.md +++ b/docs/cli/argo_archive_delete.md @@ -35,6 +35,7 @@ argo archive delete UID... [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. 
Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_archive_get.md b/docs/cli/argo_archive_get.md index 7ba33a801b51..9568d375eca8 100644 --- a/docs/cli/argo_archive_get.md +++ b/docs/cli/argo_archive_get.md @@ -36,6 +36,7 @@ argo archive get UID [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_archive_list-label-keys.md b/docs/cli/argo_archive_list-label-keys.md index 7706d6117a8c..f46da1a97622 100644 --- a/docs/cli/argo_archive_list-label-keys.md +++ b/docs/cli/argo_archive_list-label-keys.md @@ -35,6 +35,7 @@ argo archive list-label-keys [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. 
(default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_archive_list-label-values.md b/docs/cli/argo_archive_list-label-values.md index f4f6249da9f3..e41582333515 100644 --- a/docs/cli/argo_archive_list-label-values.md +++ b/docs/cli/argo_archive_list-label-values.md @@ -36,6 +36,7 @@ argo archive list-label-values [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_archive_list.md b/docs/cli/argo_archive_list.md index 2448e1c55c03..f02e35109ff7 100644 --- a/docs/cli/argo_archive_list.md +++ b/docs/cli/argo_archive_list.md @@ -38,6 +38,7 @@ argo archive list [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). 
A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_archive_resubmit.md b/docs/cli/argo_archive_resubmit.md index 9c1c6976c2b2..0ce78536b3c9 100644 --- a/docs/cli/argo_archive_resubmit.md +++ b/docs/cli/argo_archive_resubmit.md @@ -15,27 +15,27 @@ argo archive resubmit [WORKFLOW...] [flags] # Resubmit multiple workflows: - argo resubmit uid another-uid + argo archive resubmit uid another-uid # Resubmit multiple workflows by label selector: - argo resubmit -l workflows.argoproj.io/test=true + argo archive resubmit -l workflows.argoproj.io/test=true # Resubmit multiple workflows by field selector: - argo resubmit --field-selector metadata.namespace=argo + argo archive resubmit --field-selector metadata.namespace=argo # Resubmit and wait for completion: - argo resubmit --wait uid + argo archive resubmit --wait uid # Resubmit and watch until completion: - argo resubmit --watch uid + argo archive resubmit --watch uid # Resubmit and tail logs until completion: - argo resubmit --log uid + argo archive resubmit --log uid ``` @@ -47,6 +47,7 @@ argo archive resubmit [WORKFLOW...] [flags] --log log the workflow until it completes --memoized re-use successful steps & outputs from the previous run -o, --output string Output format. One of: name|json|yaml|wide + -p, --parameter stringArray input parameter to override on the original workflow spec --priority int32 workflow priority -l, --selector string Selector (label query) to filter on, not including uninitialized ones, supports '=', '==', and '!='.(e.g. -l key1=value1,key2=value2) -w, --wait wait for the workflow to complete, only works when a single workflow is resubmitted @@ -76,6 +77,7 @@ argo archive resubmit [WORKFLOW...] [flags] --loglevel string Set the logging level. 
One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_archive_retry.md b/docs/cli/argo_archive_retry.md new file mode 100644 index 000000000000..710bdbbbda9f --- /dev/null +++ b/docs/cli/argo_archive_retry.md @@ -0,0 +1,94 @@ +## argo archive retry + +retry zero or more workflows + +``` +argo archive retry [WORKFLOW...] [flags] +``` + +### Examples + +``` +# Retry a workflow: + + argo archive retry uid + +# Retry multiple workflows: + + argo archive retry uid another-uid + +# Retry multiple workflows by label selector: + + argo archive retry -l workflows.argoproj.io/test=true + +# Retry multiple workflows by field selector: + + argo archive retry --field-selector metadata.namespace=argo + +# Retry and wait for completion: + + argo archive retry --wait uid + +# Retry and watch until completion: + + argo archive retry --watch uid + +# Retry and tail logs until completion: + + argo archive retry --log uid + +``` + +### Options + +``` + --field-selector string Selector (field query) to filter on, supports '=', '==', and '!='.(e.g. --field-selector key1=value1,key2=value2). The server only supports a limited number of field queries per type. 
+ -h, --help help for retry + --log log the workflow until it completes + --node-field-selector string selector of nodes to reset, e.g.: --node-field-selector inputs.parameters.myparam.value=abc + -o, --output string Output format. One of: name|json|yaml|wide + -p, --parameter stringArray input parameter to override on the original workflow spec + --restart-successful indicates to restart successful nodes matching the --node-field-selector + -l, --selector string Selector (label query) to filter on, not including uninitialized ones, supports '=', '==', and '!='.(e.g. -l key1=value1,key2=value2) + -w, --wait wait for the workflow to complete, only works when a single workflow is retried + --watch watch the workflow until it completes, only works when a single workflow is retried +``` + +### Options inherited from parent commands + +``` + --argo-base-href string A path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. + -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. + --as string Username to impersonate for the operation + --as-group stringArray Group to impersonate for the operation, this flag can be repeated to specify multiple groups. + --as-uid string UID to impersonate for the operation + --certificate-authority string Path to a cert file for the certificate authority + --client-certificate string Path to a client certificate file for TLS + --client-key string Path to a client key file for TLS + --cluster string The name of the kubeconfig cluster to use + --context string The name of the kubeconfig context to use + --gloglevel int Set the glog logging level + -H, --header strings Sets additional header to all requests made by Argo CLI. 
(Can be repeated multiple times to add multiple headers, also supports comma separated headers) Used only when either ARGO_HTTP1 or --argo-http1 is set to true. + --insecure-skip-tls-verify If true, the server's certificate will not be checked for validity. This will make your HTTPS connections insecure + -k, --insecure-skip-verify If true, the Argo Server's certificate will not be checked for validity. This will make your HTTPS connections insecure. Defaults to the ARGO_INSECURE_SKIP_VERIFY environment variable. + --instanceid string submit with a specific controller's instance id label. Default to the ARGO_INSTANCEID environment variable. + --kubeconfig string Path to a kube config. Only required if out-of-cluster + --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") + -n, --namespace string If present, the namespace scope for this CLI request + --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy + --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") + -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) + --server string The address and port of the Kubernetes API server + --tls-server-name string If provided, this name will be used to validate server certificate. If this is not provided, hostname used to contact the server is used. + --token string Bearer token for authentication to the API server + --user string The name of the kubeconfig user to use + --username string Username for basic authentication to the API server + -v, --verbose Enabled verbose logging, i.e. 
--loglevel debug +``` + +### SEE ALSO + +* [argo archive](argo_archive.md) - manage the workflow archive + diff --git a/docs/cli/argo_auth.md b/docs/cli/argo_auth.md index 4e035a0e6c33..e7ca7f211418 100644 --- a/docs/cli/argo_auth.md +++ b/docs/cli/argo_auth.md @@ -35,6 +35,7 @@ argo auth [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_auth_token.md b/docs/cli/argo_auth_token.md index a507b8034a9f..28c320441e6c 100644 --- a/docs/cli/argo_auth_token.md +++ b/docs/cli/argo_auth_token.md @@ -35,6 +35,7 @@ argo auth token [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. 
(default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_cluster-template.md b/docs/cli/argo_cluster-template.md index 73317ba79daf..5c6c8a5405c9 100644 --- a/docs/cli/argo_cluster-template.md +++ b/docs/cli/argo_cluster-template.md @@ -35,6 +35,7 @@ argo cluster-template [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_cluster-template_create.md b/docs/cli/argo_cluster-template_create.md index 495b650896eb..4b8d7a40d89b 100644 --- a/docs/cli/argo_cluster-template_create.md +++ b/docs/cli/argo_cluster-template_create.md @@ -37,6 +37,7 @@ argo cluster-template create FILE1 FILE2... [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. 
Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_cluster-template_delete.md b/docs/cli/argo_cluster-template_delete.md index b5341da37782..99c8c2f9c5bc 100644 --- a/docs/cli/argo_cluster-template_delete.md +++ b/docs/cli/argo_cluster-template_delete.md @@ -36,6 +36,7 @@ argo cluster-template delete WORKFLOW_TEMPLATE [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_cluster-template_get.md b/docs/cli/argo_cluster-template_get.md index 064a7b9cdbb6..89c297589cb9 100644 --- a/docs/cli/argo_cluster-template_get.md +++ b/docs/cli/argo_cluster-template_get.md @@ -36,6 +36,7 @@ argo cluster-template get CLUSTER WORKFLOW_TEMPLATE... [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). 
A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_cluster-template_lint.md b/docs/cli/argo_cluster-template_lint.md index 547dcc9030e9..49b6a79ad9c5 100644 --- a/docs/cli/argo_cluster-template_lint.md +++ b/docs/cli/argo_cluster-template_lint.md @@ -37,6 +37,7 @@ argo cluster-template lint FILE... [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_cluster-template_list.md b/docs/cli/argo_cluster-template_list.md index b52018039b67..9526874ab320 100644 --- a/docs/cli/argo_cluster-template_list.md +++ b/docs/cli/argo_cluster-template_list.md @@ -36,6 +36,7 @@ argo cluster-template list [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. 
Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_completion.md b/docs/cli/argo_completion.md index c3eb79a3741f..136e4abdb49e 100644 --- a/docs/cli/argo_completion.md +++ b/docs/cli/argo_completion.md @@ -48,6 +48,7 @@ argo completion SHELL [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_cp.md b/docs/cli/argo_cp.md new file mode 100644 index 000000000000..6cb6bcd0548c --- /dev/null +++ b/docs/cli/argo_cp.md @@ -0,0 +1,69 @@ +## argo cp + +copy artifacts from workflow + +``` +argo cp my-wf output-directory ... 
[flags] +``` + +### Examples + +``` +# Copy a workflow's artifacts to a local output directory: + + argo cp my-wf output-directory + +# Copy artifacts from a specific node in a workflow to a local output directory: + + argo cp my-wf output-directory --node-id=my-wf-node-id-123 + +``` + +### Options + +``` + --artifact-name string name of output artifact in workflow + -h, --help help for cp + --node-id string id of node in workflow + --path string use variables {workflowName}, {nodeId}, {templateName}, {artifactName}, and {namespace} to create a customized path to store the artifacts; example: {workflowName}/{templateName}/{artifactName} (default "{namespace}/{workflowName}/{nodeId}/outputs/{artifactName}") + --template-name string name of template in workflow +``` + +### Options inherited from parent commands + +``` + --argo-base-href string A path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. + -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. + --as string Username to impersonate for the operation + --as-group stringArray Group to impersonate for the operation, this flag can be repeated to specify multiple groups. + --as-uid string UID to impersonate for the operation + --certificate-authority string Path to a cert file for the certificate authority + --client-certificate string Path to a client certificate file for TLS + --client-key string Path to a client key file for TLS + --cluster string The name of the kubeconfig cluster to use + --context string The name of the kubeconfig context to use + --gloglevel int Set the glog logging level + -H, --header strings Sets additional header to all requests made by Argo CLI. 
(Can be repeated multiple times to add multiple headers, also supports comma separated headers) Used only when either ARGO_HTTP1 or --argo-http1 is set to true. + --insecure-skip-tls-verify If true, the server's certificate will not be checked for validity. This will make your HTTPS connections insecure + -k, --insecure-skip-verify If true, the Argo Server's certificate will not be checked for validity. This will make your HTTPS connections insecure. Defaults to the ARGO_INSECURE_SKIP_VERIFY environment variable. + --instanceid string submit with a specific controller's instance id label. Default to the ARGO_INSTANCEID environment variable. + --kubeconfig string Path to a kube config. Only required if out-of-cluster + --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") + -n, --namespace string If present, the namespace scope for this CLI request + --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy + --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") + -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) + --server string The address and port of the Kubernetes API server + --tls-server-name string If provided, this name will be used to validate server certificate. If this is not provided, hostname used to contact the server is used. + --token string Bearer token for authentication to the API server + --user string The name of the kubeconfig user to use + --username string Username for basic authentication to the API server + -v, --verbose Enabled verbose logging, i.e. 
--loglevel debug +``` + +### SEE ALSO + +* [argo](argo.md) - argo is the command line interface to Argo + diff --git a/docs/cli/argo_cron.md b/docs/cli/argo_cron.md index ef40dcd9816d..b4807f0382da 100644 --- a/docs/cli/argo_cron.md +++ b/docs/cli/argo_cron.md @@ -39,6 +39,7 @@ argo cron [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_cron_create.md b/docs/cli/argo_cron_create.md index b6d260c3ef31..4964e04ef43b 100644 --- a/docs/cli/argo_cron_create.md +++ b/docs/cli/argo_cron_create.md @@ -45,6 +45,7 @@ argo cron create FILE1 FILE2... [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. 
(default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_cron_delete.md b/docs/cli/argo_cron_delete.md index 00fd42543523..14352346708f 100644 --- a/docs/cli/argo_cron_delete.md +++ b/docs/cli/argo_cron_delete.md @@ -36,6 +36,7 @@ argo cron delete [CRON_WORKFLOW... | --all] [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_cron_get.md b/docs/cli/argo_cron_get.md index d004f8703325..73badb4f7af9 100644 --- a/docs/cli/argo_cron_get.md +++ b/docs/cli/argo_cron_get.md @@ -36,6 +36,7 @@ argo cron get CRON_WORKFLOW... [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. 
(default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_cron_lint.md b/docs/cli/argo_cron_lint.md index d03e32860450..85768db3cc27 100644 --- a/docs/cli/argo_cron_lint.md +++ b/docs/cli/argo_cron_lint.md @@ -37,6 +37,7 @@ argo cron lint FILE... [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_cron_list.md b/docs/cli/argo_cron_list.md index db7161180d5d..09416e1fd673 100644 --- a/docs/cli/argo_cron_list.md +++ b/docs/cli/argo_cron_list.md @@ -37,6 +37,7 @@ argo cron list [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. 
(default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_cron_resume.md b/docs/cli/argo_cron_resume.md index 54b4fdc728c1..d02698edef36 100644 --- a/docs/cli/argo_cron_resume.md +++ b/docs/cli/argo_cron_resume.md @@ -35,6 +35,7 @@ argo cron resume [CRON_WORKFLOW...] [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_cron_suspend.md b/docs/cli/argo_cron_suspend.md index 23cffda09004..78ccaf259cf5 100644 --- a/docs/cli/argo_cron_suspend.md +++ b/docs/cli/argo_cron_suspend.md @@ -35,6 +35,7 @@ argo cron suspend CRON_WORKFLOW... [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. 
(default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_delete.md b/docs/cli/argo_delete.md index c679bd9b740e..f1e2bada46ca 100644 --- a/docs/cli/argo_delete.md +++ b/docs/cli/argo_delete.md @@ -3,7 +3,7 @@ delete workflows ``` -argo delete [--dry-run] [WORKFLOW...|[--all] [--older] [--completed] [--resubmitted] [--prefix PREFIX] [--selector SELECTOR]] [flags] +argo delete [--dry-run] [WORKFLOW...|[--all] [--older] [--completed] [--resubmitted] [--prefix PREFIX] [--selector SELECTOR] [--force] ] [flags] ``` ### Examples @@ -26,7 +26,8 @@ argo delete [--dry-run] [WORKFLOW...|[--all] [--older] [--completed] [--resubmit -A, --all-namespaces Delete workflows from all namespaces --completed Delete completed workflows --dry-run Do not delete the workflow, only print what would happen - --field-selector string Selector (field query) to filter on, supports '=', '==', and '!='.(e.g. --field-selectorkey1=value1,key2=value2). The server only supports a limited number of field queries per type. + --field-selector string Selector (field query) to filter on, supports '=', '==', and '!='.(e.g. --field-selector key1=value1,key2=value2). The server only supports a limited number of field queries per type. + --force Force delete workflows by removing finalizers -h, --help help for delete --older string Delete completed workflows finished before the specified duration (e.g. 10m, 3h, 1d) --prefix string Delete workflows by prefix @@ -57,6 +58,7 @@ argo delete [--dry-run] [WORKFLOW...|[--all] [--older] [--completed] [--resubmit --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. 
Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_executor-plugin.md b/docs/cli/argo_executor-plugin.md index 5846f6eb4cac..df57b63e03cc 100644 --- a/docs/cli/argo_executor-plugin.md +++ b/docs/cli/argo_executor-plugin.md @@ -35,6 +35,7 @@ argo executor-plugin [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_executor-plugin_build.md b/docs/cli/argo_executor-plugin_build.md index ce2b34a4227b..6134508d21ac 100644 --- a/docs/cli/argo_executor-plugin_build.md +++ b/docs/cli/argo_executor-plugin_build.md @@ -35,6 +35,7 @@ argo executor-plugin build DIR [flags] --loglevel string Set the logging level. 
One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_get.md b/docs/cli/argo_get.md index b79dc96e5839..72fc870492d3 100644 --- a/docs/cli/argo_get.md +++ b/docs/cli/argo_get.md @@ -52,6 +52,7 @@ argo get WORKFLOW... [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_lint.md b/docs/cli/argo_lint.md index 8b4431daa9b4..3bab859412c3 100644 --- a/docs/cli/argo_lint.md +++ b/docs/cli/argo_lint.md @@ -52,6 +52,7 @@ argo lint FILE... [flags] --loglevel string Set the logging level. 
One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_list.md b/docs/cli/argo_list.md index 11cea8093726..d8d619c3b182 100644 --- a/docs/cli/argo_list.md +++ b/docs/cli/argo_list.md @@ -12,7 +12,7 @@ argo list [flags] -A, --all-namespaces Show workflows from all namespaces --chunk-size int Return large lists in chunks rather than all at once. Pass 0 to disable. --completed Show completed workflows. Mutually exclusive with --running. - --field-selector string Selector (field query) to filter on, supports '=', '==', and '!='.(e.g. --field-selectorkey1=value1,key2=value2). The server only supports a limited number of field queries per type. + --field-selector string Selector (field query) to filter on, supports '=', '==', and '!='.(e.g. --field-selector key1=value1,key2=value2). The server only supports a limited number of field queries per type. -h, --help help for list --no-headers Don't print headers (default print headers). --older string List completed workflows finished before the specified duration (e.g. 10m, 3h, 1d) @@ -48,6 +48,7 @@ argo list [flags] --loglevel string Set the logging level. 
One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_logs.md b/docs/cli/argo_logs.md index 14d0c595006b..fc9f9ffb8a64 100644 --- a/docs/cli/argo_logs.md +++ b/docs/cli/argo_logs.md @@ -77,6 +77,7 @@ argo logs WORKFLOW [POD] [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_node.md b/docs/cli/argo_node.md index 18c2952c806f..d291e1eccf19 100644 --- a/docs/cli/argo_node.md +++ b/docs/cli/argo_node.md @@ -52,6 +52,7 @@ argo node ACTION WORKFLOW FLAGS [flags] --loglevel string Set the logging level. 
One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_resubmit.md b/docs/cli/argo_resubmit.md index 2f260acc0597..0d32b3e27e35 100644 --- a/docs/cli/argo_resubmit.md +++ b/docs/cli/argo_resubmit.md @@ -51,6 +51,7 @@ argo resubmit [WORKFLOW...] [flags] --log log the workflow until it completes --memoized re-use successful steps & outputs from the previous run -o, --output string Output format. One of: name|json|yaml|wide + -p, --parameter stringArray input parameter to override on the original workflow spec --priority int32 workflow priority -l, --selector string Selector (label query) to filter on, not including uninitialized ones, supports '=', '==', and '!='.(e.g. -l key1=value1,key2=value2) -w, --wait wait for the workflow to complete, only works when a single workflow is resubmitted @@ -80,6 +81,7 @@ argo resubmit [WORKFLOW...] [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. 
Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_resume.md b/docs/cli/argo_resume.md index 95be9120d8be..70c400ccad09 100644 --- a/docs/cli/argo_resume.md +++ b/docs/cli/argo_resume.md @@ -48,6 +48,7 @@ argo resume WORKFLOW1 WORKFLOW2... [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_retry.md b/docs/cli/argo_retry.md index ca40841d65de..dd8670100255 100644 --- a/docs/cli/argo_retry.md +++ b/docs/cli/argo_retry.md @@ -51,6 +51,7 @@ argo retry [WORKFLOW...] [flags] --log log the workflow until it completes --node-field-selector string selector of nodes to reset, eg: --node-field-selector inputs.paramaters.myparam.value=abc -o, --output string Output format. 
One of: name|json|yaml|wide + -p, --parameter stringArray input parameter to override on the original workflow spec --restart-successful indicates to restart successful nodes matching the --node-field-selector -l, --selector string Selector (label query) to filter on, not including uninitialized ones, supports '=', '==', and '!='.(e.g. -l key1=value1,key2=value2) -w, --wait wait for the workflow to complete, only works when a single workflow is retried @@ -80,6 +81,7 @@ argo retry [WORKFLOW...] [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_server.md b/docs/cli/argo_server.md index 7d101d8a11c7..562133e1e66d 100644 --- a/docs/cli/argo_server.md +++ b/docs/cli/argo_server.md @@ -17,6 +17,8 @@ See https://argoproj.github.io/argo-workflows/argo-server/ ``` --access-control-allow-origin string Set Access-Control-Allow-Origin header in HTTP responses. + --allowed-link-protocol stringArray Allowed link protocol in configMap. Used if the allowed configMap links protocol are different from http,https. Defaults to the environment variable ALLOWED_LINK_PROTOCOL (default [http,https]) + --api-rate-limit uint Set limit per IP for api ratelimiter (default 1000) --auth-mode stringArray API server authentication mode. 
Any 1 or more length permutation of: client,server,sso (default [client]) --basehref string Value for base href in index.html. Used if the server is running behind reverse proxy under subpath different from /. Defaults to the environment variable BASE_HREF. (default "/") -b, --browser enable automatic launching of the browser [local mode] @@ -30,7 +32,6 @@ See https://argoproj.github.io/argo-workflows/argo-server/ --managed-namespace string namespace that watches, default to the installation namespace --namespaced run as namespaced mode -p, --port int Port to listen on (default 2746) - --sso-namespace string namespace that will be used for SSO RBAC. Defaults to installation namespace. Used only in namespaced mode --x-frame-options string Set X-Frame-Options header in HTTP responses. (default "DENY") ``` @@ -57,6 +58,7 @@ See https://argoproj.github.io/argo-workflows/argo-server/ --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_stop.md b/docs/cli/argo_stop.md index 95818ce0783b..c39c73b9d33a 100644 --- a/docs/cli/argo_stop.md +++ b/docs/cli/argo_stop.md @@ -61,6 +61,7 @@ argo stop WORKFLOW WORKFLOW2... [flags] --loglevel string Set the logging level. 
One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_submit.md b/docs/cli/argo_submit.md index d943b03eac54..823c427efa6b 100644 --- a/docs/cli/argo_submit.md +++ b/docs/cli/argo_submit.md @@ -79,6 +79,7 @@ argo submit [FILE... | --from `kind/name] [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_suspend.md b/docs/cli/argo_suspend.md index a83352431403..daa8119ff96b 100644 --- a/docs/cli/argo_suspend.md +++ b/docs/cli/argo_suspend.md @@ -47,6 +47,7 @@ argo suspend WORKFLOW1 WORKFLOW2... [flags] --loglevel string Set the logging level. 
One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_template.md b/docs/cli/argo_template.md index a56eb17b6d94..bbfc6a438c8d 100644 --- a/docs/cli/argo_template.md +++ b/docs/cli/argo_template.md @@ -35,6 +35,7 @@ argo template [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_template_create.md b/docs/cli/argo_template_create.md index 048f702a8660..953f408f3be8 100644 --- a/docs/cli/argo_template_create.md +++ b/docs/cli/argo_template_create.md @@ -37,6 +37,7 @@ argo template create FILE1 FILE2... [flags] --loglevel string Set the logging level. 
One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_template_delete.md b/docs/cli/argo_template_delete.md index 22cad2abc9c9..74c8077b4494 100644 --- a/docs/cli/argo_template_delete.md +++ b/docs/cli/argo_template_delete.md @@ -36,6 +36,7 @@ argo template delete WORKFLOW_TEMPLATE [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_template_get.md b/docs/cli/argo_template_get.md index 68f65264f743..ee0f42f4751c 100644 --- a/docs/cli/argo_template_get.md +++ b/docs/cli/argo_template_get.md @@ -36,6 +36,7 @@ argo template get WORKFLOW_TEMPLATE... 
[flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_template_lint.md b/docs/cli/argo_template_lint.md index c3f9898507e2..65ac604d8a77 100644 --- a/docs/cli/argo_template_lint.md +++ b/docs/cli/argo_template_lint.md @@ -37,6 +37,7 @@ argo template lint (DIRECTORY | FILE1 FILE2 FILE3...) [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. 
(default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_template_list.md b/docs/cli/argo_template_list.md index 5bf7d97d5852..a1dbf6aa5397 100644 --- a/docs/cli/argo_template_list.md +++ b/docs/cli/argo_template_list.md @@ -37,6 +37,7 @@ argo template list [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_terminate.md b/docs/cli/argo_terminate.md index 1e986ff3770b..463c84c38b02 100644 --- a/docs/cli/argo_terminate.md +++ b/docs/cli/argo_terminate.md @@ -31,7 +31,7 @@ argo terminate WORKFLOW WORKFLOW2... [flags] ``` --dry-run Do not terminate the workflow, only print what would happen - --field-selector string Selector (field query) to filter on, supports '=', '==', and '!='.(e.g. --field-selectorkey1=value1,key2=value2). The server only supports a limited number of field queries per type. + --field-selector string Selector (field query) to filter on, supports '=', '==', and '!='.(e.g. --field-selector key1=value1,key2=value2). The server only supports a limited number of field queries per type. -h, --help help for terminate -l, --selector string Selector (label query) to filter on, not including uninitialized ones, supports '=', '==', and '!='.(e.g. 
-l key1=value1,key2=value2) ``` @@ -59,6 +59,7 @@ argo terminate WORKFLOW WORKFLOW2... [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_version.md b/docs/cli/argo_version.md index 5325fdcaa648..a780cc7483d6 100644 --- a/docs/cli/argo_version.md +++ b/docs/cli/argo_version.md @@ -36,6 +36,7 @@ argo version [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_wait.md b/docs/cli/argo_wait.md index 359e6c7fd231..76d5d459afc6 100644 --- a/docs/cli/argo_wait.md +++ b/docs/cli/argo_wait.md @@ -49,6 +49,7 @@ argo wait [WORKFLOW...] 
[flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/cli/argo_watch.md b/docs/cli/argo_watch.md index a3b307c8e092..3ff9c999a5c6 100644 --- a/docs/cli/argo_watch.md +++ b/docs/cli/argo_watch.md @@ -50,6 +50,7 @@ argo watch WORKFLOW [flags] --loglevel string Set the logging level. One of: debug|info|warn|error (default "info") -n, --namespace string If present, the namespace scope for this CLI request --password string Password for basic authentication to the API server + --proxy-url string If provided, this URL will be used to connect via proxy --request-timeout string The length of time to wait before giving up on a single server request. Non-zero values should contain a corresponding time unit (e.g. 1s, 2m, 3h). A value of zero means don't timeout requests. (default "0") -e, --secure Whether or not the server is using TLS with the Argo Server. Defaults to the ARGO_SECURE environment variable. (default true) --server string The address and port of the Kubernetes API server diff --git a/docs/client-libraries.md b/docs/client-libraries.md index e30203281244..15b62236ecf9 100644 --- a/docs/client-libraries.md +++ b/docs/client-libraries.md @@ -11,18 +11,17 @@ Client libraries often handle common tasks such as authentication for you. 
The following client libraries are officially maintained by the Argo team. -| Language | Client Library | Examples/Docs | -|----------|----------------|---------------| -| Golang | [apiclient.go](https://github.com/argoproj/argo-workflows/blob/master/pkg/apiclient/apiclient.go) | [Example](https://github.com/argoproj/argo-workflows/blob/master/cmd/argo/commands/submit.go) -| Java | [Java](https://github.com/argoproj/argo-workflows/blob/master/sdks/java) | | -| Python | [Python](https://github.com/argoproj/argo-workflows/blob/master/sdks/python) | [Examples](https://github.com/argoproj/argo-workflows/tree/master/sdks/python/examples)/[Docs](https://github.com/argoproj/argo-workflows/tree/master/sdks/python/client/docs) | +| Language | Client Library | Examples/Docs | +|----------|-----------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------| +| Golang | [`apiclient.go`](https://github.com/argoproj/argo-workflows/blob/master/pkg/apiclient/apiclient.go) | [Example](https://github.com/argoproj/argo-workflows/blob/master/cmd/argo/commands/submit.go) | +| Java | [Java](https://github.com/argoproj/argo-workflows/blob/master/sdks/java) | | +| Python | [Python](https://github.com/argoproj/argo-workflows/blob/master/sdks/python) | | ## Community-maintained client libraries The following client libraries are provided and maintained by their authors, not the Argo team. 
-| Language | Client Library | Examples/Docs | -|----------|----------------|---------------| -| Python | [Couler](https://github.com/couler-proj/couler) | Multi-workflow engine support Python SDK | -| Python | [Hera](https://github.com/argoproj-labs/hera-workflows) | Easy and accessible Argo workflows construction and submission in Python | - +| Language | Client Library | Examples/Docs | +|----------|---------------------------------------------------------|--------------------------------------------------------------------------| +| Python | [Couler](https://github.com/couler-proj/couler) | Multi-workflow engine support Python SDK | +| Python | [Hera](https://github.com/argoproj-labs/hera-workflows) | Easy and accessible Argo workflows construction and submission in Python | diff --git a/docs/cluster-workflow-templates.md b/docs/cluster-workflow-templates.md index fe6541df4078..a56070f61909 100644 --- a/docs/cluster-workflow-templates.md +++ b/docs/cluster-workflow-templates.md @@ -4,8 +4,8 @@ ## Introduction -`ClusterWorkflowTemplates` are cluster scoped `WorkflowTemplates`. `ClusterWorkflowTemplate` -can be created cluster scoped like `ClusterRole` and can be accessed all namespaces in the cluster. +`ClusterWorkflowTemplates` are cluster scoped `WorkflowTemplates`. `ClusterWorkflowTemplate` +can be created cluster scoped like `ClusterRole` and can be accessed across all namespaces in the cluster. `WorkflowTemplates` documentation [link](./workflow-templates.md) @@ -30,11 +30,11 @@ spec: ## Referencing other `ClusterWorkflowTemplates` -You can reference `templates` from another `ClusterWorkflowTemplates` using a `templateRef` field with `clusterScope: true` . +You can reference `templates` from other `ClusterWorkflowTemplates` using a `templateRef` field with `clusterScope: true` . Just as how you reference other `templates` within the same `Workflow`, you should do so from a `steps` or `dag` template. 
Here is an example: -More examples []() + ```yaml apiVersion: argoproj.io/v1alpha1 kind: Workflow @@ -55,18 +55,22 @@ spec: - name: message value: "hello world" ``` + > 2.9 and after -#### Create `Workflow` from `ClusterWorkflowTemplate` Spec -You can create `Workflow` from `ClusterWorkflowTemplate` spec using `workflowTemplateRef` with `clusterScope: true`. If you pass the arguments to created `Workflow`, it will be merged with ClusterWorkflowTemplate arguments + +### Create `Workflow` from `ClusterWorkflowTemplate` Spec + +You can create `Workflow` from `ClusterWorkflowTemplate` spec using `workflowTemplateRef` with `clusterScope: true`. If you pass the arguments to created `Workflow`, it will be merged with cluster workflow template arguments Here is an example for `ClusterWorkflowTemplate` with `entrypoint` and `arguments` + ```yaml apiVersion: argoproj.io/v1alpha1 kind: ClusterWorkflowTemplate metadata: name: cluster-workflow-template-submittable spec: - entryPoint: whalesay-template + entrypoint: whalesay-template arguments: parameters: - name: message @@ -82,7 +86,9 @@ spec: args: ["{{inputs.parameters.message}}"] ``` + Here is an example for creating `ClusterWorkflowTemplate` as Workflow with passing `entrypoint` and `arguments` to `ClusterWorkflowTemplate` + ```yaml apiVersion: argoproj.io/v1alpha1 kind: Workflow @@ -100,6 +106,7 @@ spec: ``` Here is an example of a creating `WorkflowTemplate` as Workflow and using `WorkflowTemplates`'s `entrypoint` and `Workflow Arguments` + ```yaml apiVersion: argoproj.io/v1alpha1 kind: Workflow @@ -112,28 +119,27 @@ spec: ``` - - ## Managing `ClusterWorkflowTemplates` ### CLI You can create some example templates as follows: -``` +```bash argo cluster-template create https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/cluster-workflow-template/clustertemplates.yaml ``` The submit a workflow using one of those templates: -``` +```bash argo submit 
https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/cluster-workflow-template/cluster-wftmpl-dag.yaml ``` > 2.7 and after > The submit a `ClusterWorkflowTemplate` as a `Workflow`: -```sh + +```bash argo submit --from clusterworkflowtemplate/workflow-template-submittable ``` diff --git a/docs/conditional-artifacts-parameters.md b/docs/conditional-artifacts-parameters.md index 2a10d6a166a5..3b33c843f520 100644 --- a/docs/conditional-artifacts-parameters.md +++ b/docs/conditional-artifacts-parameters.md @@ -33,7 +33,7 @@ under step/DAG level output parameter. Both use the ## Conditional Parameters -```yaml +```yaml - name: coinflip steps: - - name: flip-coin @@ -61,12 +61,12 @@ Convenient functions added to support more use cases: 1. `asInt` - convert the string to integer (e.g: `asInt('1')`) 2. `asFloat` - convert the string to Float (e.g: `asFloat('1.23')`) 3. `string` - convert the int/float to string (e.g: `string(1)`) -4. `jsonpath` - Extract the element from Json using jsonpath ( +4. `jsonpath` - Extract the element from JSON using JSON Path ( e.g: `jsonpath('{"employee":{"name":"sonoo","salary":56000,"married":true}}", "$.employee.name" )` ) 5. [Sprig](http://masterminds.github.io/sprig/) - Support all `sprig` functions * [Advanced example: fibonacci Sequence](https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/fibonacci-seq-conditional-param.yaml) -!!! NOTE +!!! NOTE Expressions will decode the `-` as operator if template name has `-`, it will fail the expression. So here solution for template name which has `-` in its name. 
`step['one-two-three'].outputs.artifacts` diff --git a/docs/configure-archive-logs.md b/docs/configure-archive-logs.md index f6b6f66fb5e5..023f66366c4b 100644 --- a/docs/configure-archive-logs.md +++ b/docs/configure-archive-logs.md @@ -1,25 +1,27 @@ # Configuring Archive Logs -To enable automatic pipeline logging, you need to configure ***archiveLogs*** at workflow-controller configmap, workflow spec, or template level. You also need to configure [Artifact Repository](configure-artifact-repository.md) to define where this logging artifact is stored. +⚠️ We do not recommend you rely on Argo Workflows to archive logs. Instead, use a conventional Kubernetes logging facility. + +To enable automatic pipeline logging, you need to configure `archiveLogs` at workflow-controller config-map, workflow spec, or template level. You also need to configure [Artifact Repository](configure-artifact-repository.md) to define where this logging artifact is stored. Archive logs follows priorities: workflow-controller config (on) > workflow spec (on/off) > template (on/off) -| Controller Configmap | Workflow Spec | Template | are we archiving logs? | -|---|---|---|---| -| true | true | true | true | -| true | true | false | true | -| true | false | true | true | -| true | false | false | true | -| false | true | true | true | -| false | true | false | false | -| false | false | true | true | -| false | false | false | false | +| Controller Config Map | Workflow Spec | Template | are we archiving logs? 
| +|-----------------------|---------------|----------|------------------------| +| true | true | true | true | +| true | true | false | true | +| true | false | true | true | +| true | false | false | true | +| false | true | true | true | +| false | true | false | false | +| false | false | true | true | +| false | false | false | false | -## Configuring Workflow Controller Configmap +## Configuring Workflow Controller Config Map -See [Workflow Controller Configmap](workflow-controller-configmap.md) +See [Workflow Controller Config Map](workflow-controller-configmap.md) ## Configuring Workflow Spec diff --git a/docs/configure-artifact-repository.md b/docs/configure-artifact-repository.md index 8cb109215d8b..41a442a4eff6 100644 --- a/docs/configure-artifact-repository.md +++ b/docs/configure-artifact-repository.md @@ -2,12 +2,13 @@ To run Argo workflows that use artifacts, you must configure and use an artifact repository. Argo supports any S3 compatible artifact repository such as AWS, GCS -and Minio. This section shows how to configure the artifact repository. +and MinIO. This section shows how to configure the artifact repository. Subsequent sections will show how to use it. | Name | Inputs | Outputs | Usage (Feb 2020) | |---|---|---|---| | Artifactory | Yes | Yes | 11% | +| Azure Blob | Yes | Yes | - | | GCS | Yes | Yes | - | | Git | Yes | No | - | | HDFS | Yes | Yes | 3% | @@ -19,39 +20,41 @@ Subsequent sections will show how to use it. The actual repository used by a workflow is chosen by the following rules: 1. Anything explicitly configured using [Artifact Repository Ref](artifact-repository-ref.md). This is the most flexible, safe, and secure option. -2. From a config map named `artifact-repositories` if it has the `workflows.argoproj.io/default-artifact-repository` annotation in the workflow's namespace. -3. From a workflow controller configmap. +2. 
From a config map named `artifact-repositories` if it has the `workflows.argoproj.io/default-artifact-repository` annotation in the workflow's namespace. +3. From a workflow controller config-map. -## Configuring Minio +## Configuring MinIO -``` -$ brew install helm # mac, helm 3.x -$ helm repo add minio https://helm.min.io/ # official minio Helm charts -$ helm repo update -$ helm install argo-artifacts minio/minio --set service.type=LoadBalancer --set fullnameOverride=argo-artifacts +NOTE: MinIO is already included in the [quick-start manifests](quick-start.md). + +```bash +brew install helm # mac, helm 3.x +helm repo add minio https://helm.min.io/ # official minio Helm charts +helm repo update +helm install argo-artifacts minio/minio --set service.type=LoadBalancer --set fullnameOverride=argo-artifacts ``` -Login to the Minio UI using a web browser (port 9000) after obtaining the +Login to the MinIO UI using a web browser (port 9000) after obtaining the external IP using `kubectl`. -``` -$ kubectl get service argo-artifacts +```bash +kubectl get service argo-artifacts ``` On Minikube: -``` -$ minikube service --url argo-artifacts +```bash +minikube service --url argo-artifacts ``` -NOTE: When minio is installed via Helm, it generates +NOTE: When MinIO is installed via Helm, it generates credentials, which you will use to login to the UI: Use the commands shown below to see the credentials -- AccessKey: kubectl get secret argo-artifacts -o jsonpath='{.data.accesskey}' | base64 --decode -- SecretKey: kubectl get secret argo-artifacts -o jsonpath='{.data.secretkey}' | base64 --decode +- `AccessKey`: `kubectl get secret argo-artifacts -o jsonpath='{.data.accesskey}' | base64 --decode` +- `SecretKey`: `kubectl get secret argo-artifacts -o jsonpath='{.data.secretkey}' | base64 --decode` -Create a bucket named `my-bucket` from the Minio UI. +Create a bucket named `my-bucket` from the MinIO UI. 
## Configuring AWS S3 @@ -62,7 +65,7 @@ an access key, you will need to create a user with just the permissions you want to associate with the access key. Otherwise, you can just create an access key using your existing user account. -``` +```bash $ export mybucket=bucket249 $ cat > policy.json < policy.json < access-key.json ``` +If you have Artifact Garbage Collection configured, you should also add "s3:DeleteObject" to the list of Actions above. + NOTE: if you want argo to figure out which region your buckets belong in, you must additionally set the following statement policy. Otherwise, you must specify a bucket region in your workflow configuration. -``` - ... +```json { "Effect":"Allow", "Action":[ @@ -104,7 +109,7 @@ specify a bucket region in your workflow configuration. ## Configuring GCS (Google Cloud Storage) Create a bucket from the GCP Console -(https://console.cloud.google.com/storage/browser). +(). There are 2 ways to configure a Google Cloud Storage. @@ -135,10 +140,10 @@ artifacts: ``` If it's a GKE cluster, and Workload Identity is configured, there's no need to -create the Service Account key and store it as a K8s secret, +create the service account key and store it as a Kubernetes secret, `serviceAccountKeySecret` is also not needed in this case. Please follow the link to configure Workload Identity -(https://cloud.google.com/kubernetes-engine/docs/how-to/workload-identity). +(). ### Use S3 APIs @@ -146,7 +151,7 @@ Enable S3 compatible access and create an access key. Note that S3 compatible access is on a per project rather than per bucket basis. - Navigate to Storage > Settings - (https://console.cloud.google.com/storage/settings). + (). - Enable interoperability access if needed. - Create a new key if needed. - Configure `s3` artifact as following example. 
@@ -173,61 +178,162 @@ artifacts: ## Configuring Alibaba Cloud OSS (Object Storage Service) -To configure artifact storage for Alibaba Cloud OSS, please first follow -the [official documentation](https://www.alibabacloud.com/product/oss) to set up -an OSS account and bucket. +Create your bucket and access key for the bucket. Suggest to limit the permission +for the access key, you will need to create a user with just the permissions you +want to associate with the access key. Otherwise, you can just create an access key +using your existing user account. -Once it's set up, you can find endpoint and bucket -information on your OSS dashboard and then use them like the following to -configure the artifact storage for your workflow: +Setup [Alibaba Cloud CLI](https://www.alibabacloud.com/help/en/alibaba-cloud-cli/latest/product-introduction) +and follow the steps to configure the artifact storage for your workflow: -```yaml -artifacts: - - name: my-art - path: /my-artifact +```bash +$ export mybucket=bucket-workflow-artifect +$ export myregion=cn-zhangjiakou +$ # limit permission to read/write the bucket. +$ cat > policy.json < access-key.json +$ # create secret in demo namespace, replace demo with your namespace. +$ kubectl create secret generic $mybucket-credentials -n demo\ + --from-literal "accessKey=$(cat access-key.json | jq -r .AccessKey.AccessKeyId)" \ + --from-literal "secretKey=$(cat access-key.json | jq -r .AccessKey.AccessKeySecret)" +$ # create configmap to config default artifact for a namespace. +$ cat > default-artifact-repository.yaml << EOF +apiVersion: v1 +kind: ConfigMap +metadata: + # If you want to use this config map by default, name it "artifact-repositories". Otherwise, you can provide a reference to a + # different config map in `artifactRepositoryRef.configMap`. + name: artifact-repositories + annotations: + # v3.0 and after - if you want to use a specific key, put that key into this annotation. 
+ workflows.argoproj.io/default-artifact-repository: default-oss-artifact-repository +data: + default-oss-artifact-repository: | oss: - endpoint: http://oss-cn-hangzhou-zmf.aliyuncs.com - bucket: test-bucket-name - key: test/mydirectory/ # this is path in the bucket + endpoint: http://oss-cn-zhangjiakou-internal.aliyuncs.com + bucket: $mybucket # accessKeySecret and secretKeySecret are secret selectors. - # It references the k8s secret named 'my-oss-credentials'. + # It references the k8s secret named 'bucket-workflow-artifect-credentials'. # This secret is expected to have have the keys 'accessKey' # and 'secretKey', containing the base64 encoded credentials # to the bucket. accessKeySecret: - name: my-oss-credentials + name: $mybucket-credentials key: accessKey secretKeySecret: - name: my-oss-credentials + name: $mybucket-credentials key: secretKey +EOF +# create cm in demo namespace, replace demo with your namespace. +$ k apply -f default-artifact-repository.yaml -n demo ``` You can also set `createBucketIfNotPresent` to `true` to tell the artifact driver to automatically create the OSS bucket if it doesn't exist yet when saving artifacts. Note that you'll need to set additional permission for your OSS account to create new buckets. -# Configure the Default Artifact Repository +## Configuring Azure Blob Storage + +Create an Azure Storage account and a container within that account. There are a number of +ways to accomplish this, including the [Azure Portal](https://portal.azure.com) or the +[CLI](https://docs.microsoft.com/en-us/cli/azure/). + +1. Retrieve the blob service endpoint for the storage account. For example: + + ```bash + az storage account show -n mystorageaccountname --query 'primaryEndpoints.blob' -otsv + ``` + +2. Retrieve the access key for the storage account. For example: + + ```bash + az storage account keys list -n mystorageaccountname --query '[0].value' -otsv + ``` + +3. Create a kubernetes secret to hold the storage account key. 
For example: + + ```bash + kubectl create secret generic my-azure-storage-credentials \ + --from-literal "account-access-key=$(az storage account keys list -n mystorageaccountname --query '[0].value' -otsv)" + ``` + +4. Configure `azure` artifact as following in the yaml. + +```yaml +artifacts: + - name: message + path: /tmp/message + azure: + endpoint: https://mystorageaccountname.blob.core.windows.net + container: my-container-name + blob: path/in/container + # accountKeySecret is a secret selector. + # It references the k8s secret named 'my-azure-storage-credentials'. + # This secret is expected to have have the key 'account-access-key', + # containing the base64 encoded credentials to the storage account. + # + # If a managed identity has been assigned to the machines running the + # workflow (e.g., https://docs.microsoft.com/en-us/azure/aks/use-managed-identity) + # then accountKeySecret is not needed, and useSDKCreds should be + # set to true instead: + # useSDKCreds: true + accountKeySecret: + name: my-azure-storage-credentials + key: account-access-key +``` + +If `useSDKCreds` is set to `true`, then the `accountKeySecret` value is not +used and authentication with Azure will be attempted using a +[`DefaultAzureCredential`](https://docs.microsoft.com/en-us/azure/developer/go/azure-sdk-authentication) +instead. + +## Configure the Default Artifact Repository In order for Argo to use your artifact repository, you can configure it as the default repository. Edit the workflow-controller config map with the correct endpoint and access/secret keys for your repository. 
-## S3 compatible artifact repository bucket (such as AWS, GCS, Minio, and Alibaba Cloud OSS) +### S3 compatible artifact repository bucket (such as AWS, GCS, MinIO, and Alibaba Cloud OSS) Use the `endpoint` corresponding to your provider: -- AWS: s3.amazonaws.com -- GCS: storage.googleapis.com -- Minio: my-minio-endpoint.default:9000 -- Alibaba Cloud OSS: oss-cn-hangzhou-zmf.aliyuncs.com +- AWS: `s3.amazonaws.com` +- GCS: `storage.googleapis.com` +- MinIO: `my-minio-endpoint.default:9000` +- Alibaba Cloud OSS: `oss-cn-hangzhou-zmf.aliyuncs.com` The `key` is name of the object in the `bucket` The `accessKeySecret` and `secretKeySecret` are secret selectors that reference the specified kubernetes -secret. The secret is expected to have the keys 'accessKey' and 'secretKey', -containing the base64 encoded credentials to the bucket. +secret. The secret is expected to have the keys `accessKey` and `secretKey`, +containing the `base64` encoded credentials to the bucket. For AWS, the `accessKeySecret` and `secretKeySecret` correspond to `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` respectively. -EC2 provides a metadata API via which applications using the AWS SDK may assume +EC2 provides a meta-data API via which applications using the AWS SDK may assume IAM roles associated with the instance. If you are running argo on EC2 and the instance role allows access to your S3 bucket, you can configure the workflow step pods to assume the role. To do so, simply omit the `accessKeySecret` and @@ -238,20 +344,20 @@ can be obtained from the GCP Console. Note that S3 compatible access is on a per project rather than per bucket basis. - Navigate to Storage > Settings - (https://console.cloud.google.com/storage/settings). + (). - Enable interoperability access if needed. - Create a new key if needed. -For Minio, the `accessKeySecret` and `secretKeySecret` naturally correspond the -AccessKey and SecretKey. 
+For MinIO, the `accessKeySecret` and `secretKeySecret` naturally correspond the +`AccessKey` and `SecretKey`. For Alibaba Cloud OSS, the `accessKeySecret` and `secretKeySecret` corresponds to `accessKeyID` `and accessKeySecret` respectively. Example: -``` -$ kubectl edit configmap workflow-controller-configmap -n argo # assumes argo was installed in the argo namespace +```bash +$ kubectl edit configmap workflow-controller-configmap -n argo # assumes argo was installed in the argo namespace ... data: artifactRepository: | @@ -272,16 +378,16 @@ data: The secrets are retrieved from the namespace you use to run your workflows. Note that you can specify a `keyFormat`. -## Google Cloud Storage (GCS) +### Google Cloud Storage (GCS) Argo also can use native GCS APIs to access a Google Cloud Storage bucket. -`serviceAccountKeySecret` references to a k8 secret which stores a Google Cloud +`serviceAccountKeySecret` references to a Kubernetes secret which stores a Google Cloud service account key to access the bucket. Example: -``` +```bash $ kubectl edit configmap workflow-controller-configmap -n argo # assumes argo was installed in the argo namespace ... data: @@ -294,7 +400,29 @@ data: key: serviceAccountKey ``` -# Accessing Non-Default Artifact Repositories +### Azure Blob Storage + +Argo can use native Azure APIs to access a Azure Blob Storage container. + +`accountKeySecret` references to a Kubernetes secret which stores an Azure Blob +Storage account shared key to access the container. + +Example: + +```bash +$ kubectl edit configmap workflow-controller-configmap -n argo # assumes argo was installed in the argo namespace +... 
+data: + artifactRepository: | + azure: + container: my-container + blobNameFormat: prefix/in/container #optional, it could reference workflow variables, such as "{{workflow.name}}/{{pod.name}}" + accountKeySecret: + name: my-azure-storage-credentials + key: account-access-key +``` + +## Accessing Non-Default Artifact Repositories This section shows how to access artifacts from non-default artifact repositories. @@ -302,7 +430,7 @@ repositories. The `endpoint`, `accessKeySecret` and `secretKeySecret` are the same as for configuring the default artifact repository described previously. -``` +```yaml templates: - name: artifact-example inputs: diff --git a/docs/container-set-template.md b/docs/container-set-template.md index 5c63624bdf9d..4e792986bd94 100644 --- a/docs/container-set-template.md +++ b/docs/container-set-template.md @@ -1,7 +1,5 @@ # Container Set Template -![GA](assets/ga.svg) - > v3.1 and after A container set templates is similar to a normal container or script template, but allows you to specify multiple @@ -40,7 +38,7 @@ spec: parameters: - name: message valueFrom: - path: /workpsace/message + path: /workspace/message ``` There are a couple of caveats: @@ -68,11 +66,11 @@ Instead, have a workspace volume and make sure all artifacts paths are on that v ## ⚠️ Resource Requests -A container set actually starts all containers, and the Emmissary only starts the main container process when the containers it depends on have completed. This mean that even though the container is doing no useful work, it is still consume resources and you're still getting billed for them. +A container set actually starts all containers, and the Emissary only starts the main container process when the containers it depends on have completed. This mean that even though the container is doing no useful work, it is still consuming resources and you're still getting billed for them. If your requests are small, this won't be a problem. 
-If your request are large, set the resource requests so the sum total is the most you'll need at once. +If your requests are large, set the resource requests so the sum total is the most you'll need at once. Example A: a simple sequence e.g. `a -> b -> c` @@ -109,7 +107,6 @@ Example B: Lopsided requests, e.g. `a -> b` where `a` is cheap and `b` is expens * `a` needs 100 cpu, 1Mi memory, runs for 10h * `b` needs 8Ki GPU, 100 Gi memory, 200 Ki GPU, runs for 5m -Can you see the problem here? `a` only wont small requests, but the container set will use the total of all requests. So it's as if you're using all that GPU for 10h. This will be expensive. +Can you see the problem here? `a` only has small requests, but the container set will use the total of all requests. So it's as if you're using all that GPU for 10h. This will be expensive. Solution: do not use container set when you have lopsided requests. - diff --git a/docs/core-concepts.md b/docs/core-concepts.md deleted file mode 100644 index 171a96cc5d27..000000000000 --- a/docs/core-concepts.md +++ /dev/null @@ -1,21 +0,0 @@ -# Core Concepts - -!!! note - Please read [Kubernetes concepts](https://kubernetes.io/docs/concepts/) first. - -* **Workflow**: a Kubernetes resource defining the execution of one or more **template**. Workflows are named. -* **Template**: a **step**, **steps** or **dag**. -* **Step**: a single step of a **workflow**, typically run a container based on **inputs** and capture the **outputs**. -* **Steps**: a list of **steps** -* **Entrypoint**: the first **step** to execute when running a **workflow** -* **Node**: a step -* **Directed Acyclic Graph (DAG)**: a set of **steps** (nodes) and the dependencies (edges) between them. 
-* **Workflow Template**: a Kubernetes resource defining a reusable workflow for a namespace -* **Cluster Workflow Template**: a Kubernetes resource defining a reusable workflow for a cluster -* **Inputs**: **parameters** and **artifacts** passed to the **step**, -* **Outputs**: **parameters** and **artifacts** outputted by a **step** -* **Parameters**: objects, strings, booleans, arrays -* **Artifacts**: files saved by a container -* **Artifact Repository**: a place where **artifacts** are stored -* **Executor**: the method to execute a container, e.g. Docker, PNS ([learn more](workflow-executors.md)) -* **Workflow Service Account**: the service account that a workflow is executed as ([learn more](service-accounts.md)) diff --git a/docs/cost-optimisation.md b/docs/cost-optimisation.md index 46ca2e4fc208..4391beeb735d 100644 --- a/docs/cost-optimisation.md +++ b/docs/cost-optimisation.md @@ -1,16 +1,16 @@ -# Cost Optimisation +# Cost Optimization -## User Cost Optimisations +## User Cost Optimizations Suggestions for users running workflows. -### Set The Workflows Pod Resource Requests +### Set The Workflows Pod Resource Requests -> Suitable if you are running a workflow with many homogenous pods. +> Suitable if you are running a workflow with many homogeneous pods. [Resource duration](resource-duration.md) shows the amount of CPU and memory requested by a pod and is indicative of the cost. You can use this to find costly steps within your workflow. -Smaller requests can be set in the pod spec patch's [resource requirements](fields.md#resourcerequirements). +Smaller requests can be set in the pod spec patch's [resource requirements](fields.md#resourcerequirements). ## Use A Node Selector To Use Cheaper Instances @@ -25,22 +25,23 @@ nodeSelector: > Suitable if you have a workflow that passes a lot of artifacts within itself. -Copying artifacts to and from storage outside of a cluster can be expensive. 
The correct choice is dependent on your artifact storage provider is vs. what volume they are using. For example, we believe it may be more expensive to allocate and delete a new block storage volume (AWS EBS, GCP persistent disk) every workflow using the PVC feature, than it is to upload and download some small files to object storage (AWS S3, GCP cloud storage). +Copying artifacts to and from storage outside of a cluster can be expensive. The correct choice is dependent on what your artifact storage provider is vs. what volume they are using. For example, we believe it may be more expensive to allocate and delete a new block storage volume (AWS EBS, GCP persistent disk) every workflow using the PVC feature, than it is to upload and download some small files to object storage (AWS S3, GCP cloud storage). -On the other hand if they are using a NFS volume shared between all their workflows with large artifacts, that might be cheaper than the data transfer and storage costs of object storage. +On the other hand if you are using a NFS volume shared between all your workflows with large artifacts, that might be cheaper than the data transfer and storage costs of object storage. Consider: * Data transfer costs (upload/download vs. copying) * Data storage costs (object storage vs. volume) * Requirement for parallel access to data (NFS vs. block storage vs. artifact) + ### Limit The Total Number Of Workflows And Pods > Suitable for all. -A workflow (and for that matter, any Kubernetes resource) will incur a cost as long as they exist in your cluster, even after they are no longer running. +A workflow (and for that matter, any Kubernetes resource) will incur a cost as long as it exists in your cluster, even after it's no longer running. -The workflow controller memory and CPU needs increase linearly with the number of pods and workflows you are currently running. 
+The workflow controller's memory and CPU needs increase linearly with the number of pods and workflows you are currently running.
 
 You should delete workflows once they are no longer needed, or enable a [Workflow Archive](workflow-archive.md) and you can still view them after they are removed from Kubernetes.
 
@@ -68,7 +69,7 @@ You can set these configurations globally using [Default Workflow Spec](default-
 Changing these settings will not delete workflows that have already run.
 
 To list old workflows:
 
-```
+```bash
 argo list --completed --since 7d
 ```
 
@@ -76,12 +77,12 @@ argo list --completed --since 7d
 
 To list/delete workflows completed over 7 days ago:
 
-```
+```bash
 argo list --older 7d
 argo delete --older 7d
 ```
 
-## Operator Cost Optimisations
+## Operator Cost Optimizations
 
 Suggestions for operators who installed Argo Workflows.
 
@@ -89,7 +90,7 @@ Suggestions for operators who installed Argo Workflows.
 
 > Suitable if you have many instances, e.g. on dozens of clusters or namespaces.
 
-Set a resource requests and limits for the `workflow-controller` and `argo-server`, e.g.
+Set resource requests and limits for the `workflow-controller` and `argo-server`, e.g.
 
 ```yaml
 requests:
diff --git a/docs/cron-backfill.md b/docs/cron-backfill.md
index 8eade87f28c5..87d7f93bbea1 100644
--- a/docs/cron-backfill.md
+++ b/docs/cron-backfill.md
@@ -2,7 +2,7 @@
 
 ## Use Case
 
-* You are using cron workflows to run daily jobs, you may need to re-run for a date, or run some historical days. 
+* You are using cron workflows to run daily jobs, you may need to re-run for a date, or run some historical days.
 
 ## Solution
 
@@ -16,4 +16,3 @@ This [full example](https://raw.githubusercontent.com/argoproj/argo-workflows/ma
 * A cron workflow named `daily-job`.
 * A workflow named `backfill-v1` that uses a resource template to create one workflow for each backfill date.
 * A alternative workflow named `backfill-v2` that uses a steps templates to run one task for each backfill date.
- diff --git a/docs/cron-workflows.md b/docs/cron-workflows.md index b13a82269c39..4ab3517ea3c5 100644 --- a/docs/cron-workflows.md +++ b/docs/cron-workflows.md @@ -1,12 +1,10 @@ # Cron Workflows -![GA](assets/ga.svg) - > v2.5 and after ## Introduction -`CronWorkflow` are workflows that run on a preset schedule. They are designed to be converted from `Workflow` easily and to mimick the same options as Kubernetes `CronJob`. In essence, `CronWorkflow` = `Workflow` + some specific cron options. +`CronWorkflow` are workflows that run on a preset schedule. They are designed to be converted from `Workflow` easily and to mimic the same options as Kubernetes `CronJob`. In essence, `CronWorkflow` = `Workflow` + some specific cron options. ## `CronWorkflow` Spec @@ -35,7 +33,7 @@ spec: `CronWorkflow.spec.workflowSpec` is the same type as `Workflow.spec` and serves as a template for `Workflow` objects that are created from it. Everything under this spec will be converted to a `Workflow`. -The resuling `Workflow` name will be a generated name based on the `CronWorkflow` name. In this example it could be something like `test-cron-wf-tj6fe`. +The resulting `Workflow` name will be a generated name based on the `CronWorkflow` name. In this example it could be something like `test-cron-wf-tj6fe`. `CronWorkflow.spec.workflowMetadata` can be used to add `labels` and `annotations`. @@ -43,7 +41,7 @@ The resuling `Workflow` name will be a generated name based on the `CronWorkflow | Option Name | Default Value | Description | |:----------------------------:|:----------------------:|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `schedule` | None, must be provided | Schedule at which the `Workflow` will be run. E.g. 
`5 4 * * * ` | +| `schedule` | None, must be provided | Schedule at which the `Workflow` will be run. E.g. `5 4 * * *` | | `timezone` | Machine timezone | Timezone during which the Workflow will be run from the IANA timezone standard, e.g. `America/Los_Angeles` | | `suspend` | `false` | If `true` Workflow scheduling will not occur. Can be set from the CLI, GitOps, or directly | | `concurrencyPolicy` | `Allow` | Policy that determines what to do if multiple `Workflows` are scheduled at the same time. Available options: `Allow`: allow all, `Replace`: remove all old before scheduling a new, `Forbid`: do not allow any new while there are old | @@ -51,6 +49,12 @@ The resuling `Workflow` name will be a generated name based on the `CronWorkflow | `successfulJobsHistoryLimit` | `3` | Number of successful `Workflows` that will be persisted at a time | | `failedJobsHistoryLimit` | `1` | Number of failed `Workflows` that will be persisted at a time | +### Cron Schedule Syntax + +The cron scheduler uses the standard cron syntax, as [documented on Wikipedia](https://en.wikipedia.org/wiki/Cron). + +More detailed documentation for the specific library used is [documented here](https://pkg.go.dev/github.com/robfig/cron#hdr-CRON_Expression_Format). + ### Crash Recovery If the `workflow-controller` crashes (and hence the `CronWorkflow` controller), there are some options you can set to ensure that `CronWorkflows` that would have been scheduled while the controller was down can still run. Mainly `startingDeadlineSeconds` can be set to specify the maximum number of seconds past the last successful run of a `CronWorkflow` during which a missed run will still be executed. @@ -65,7 +69,7 @@ This setting can also be configured in tandem with `concurrencyPolicy` to achiev Daylight Saving (DST) is taken into account when using timezone. 
This means that, depending on the local time of the scheduled job, argo will schedule the workflow once, twice, or not at all when the clock moves forward or back. -For example, with timezone set at `America/Los_Angeles`, we have daylight saving +For example, with timezone set at `America/Los_Angeles`, we have daylight saving - +1 hour (DST start) at 2020-03-08 02:00:00: @@ -73,13 +77,13 @@ For example, with timezone set at `America/Los_Angeles`, we have daylight saving | cron | sequence | workflow execution time | |------------|----------|-------------------------------| - | 59 1 * * * | 1 | 2020-03-08 01:59:00 -0800 PST | + | 59 1 * * * | 1 | 2020-03-08 01:59:00 -0800 PST | | | 2 | 2020-03-09 01:59:00 -0700 PDT | | | 3 | 2020-03-10 01:59:00 -0700 PDT | - | 0 2 * * * | 1 | 2020-03-09 02:00:00 -0700 PDT | + | 0 2 * * * | 1 | 2020-03-09 02:00:00 -0700 PDT | | | 2 | 2020-03-10 02:00:00 -0700 PDT | | | 3 | 2020-03-11 02:00:00 -0700 PDT | - | 1 2 * * * | 1 | 2020-03-09 02:01:00 -0700 PDT | + | 1 2 * * * | 1 | 2020-03-09 02:01:00 -0700 PDT | | | 2 | 2020-03-10 02:01:00 -0700 PDT | | | 3 | 2020-03-11 02:01:00 -0700 PDT | @@ -89,13 +93,13 @@ For example, with timezone set at `America/Los_Angeles`, we have daylight saving | cron | sequence | workflow execution time | |------------|----------|-------------------------------| - | 59 1 * * * | 1 | 2020-11-01 01:59:00 -0700 PDT | + | 59 1 * * * | 1 | 2020-11-01 01:59:00 -0700 PDT | | | 2 | 2020-11-01 01:59:00 -0800 PST | | | 3 | 2020-11-02 01:59:00 -0800 PST | - | 0 2 * * * | 1 | 2020-11-01 02:00:00 -0800 PST | + | 0 2 * * * | 1 | 2020-11-01 02:00:00 -0800 PST | | | 2 | 2020-11-02 02:00:00 -0800 PST | | | 3 | 2020-11-03 02:00:00 -0800 PST | - | 1 2 * * * | 1 | 2020-11-01 02:01:00 -0800 PST | + | 1 2 * * * | 1 | 2020-11-01 02:01:00 -0800 PST | | | 2 | 2020-11-02 02:01:00 -0800 PST | | | 3 | 2020-11-03 02:01:00 -0800 PST | @@ -105,7 +109,7 @@ For example, with timezone set at `America/Los_Angeles`, we have daylight saving 
`CronWorkflow` can be created from the CLI by using basic commands: -```sh +```bash $ argo cron create cron.yaml Name: test-cron-wf Namespace: argo @@ -144,7 +148,7 @@ Active Workflows: test-cron-wf-rt4nf Using `kubectl apply -f` and `kubectl get cwf` -## Backfilling Days +## Back-Filling Days See [cron backfill](cron-backfill.md). diff --git a/docs/data-sourcing-and-transformation.md b/docs/data-sourcing-and-transformation.md index 2634a025fcda..8eacded5f6b6 100644 --- a/docs/data-sourcing-and-transformation.md +++ b/docs/data-sourcing-and-transformation.md @@ -2,22 +2,22 @@ > v3.1 and after -#### Development - We have intentionally made this feature available with only bare-bones functionality. Our hope is that we are able to build this feature with our community's feedback. If you have ideas and use cases for this feature, please open an [enhancement proposal](https://github.com/argoproj/argo-workflows/issues/new?assignees=&labels=enhancement&template=enhancement_proposal.md) on GitHub. Additionally, please take a look at our current ideas at the bottom of this document. ## Introduction + Users often source and transform data as part of their workflows. The `data` template provides first-class support for these common operations. `data` templates can best be understood by looking at a common data sourcing and transformation operation in `bash`: ```bash -$ find -r . | grep ".pdf" | sed "s/foo/foo.ready/" +find -r . 
| grep ".pdf" | sed "s/foo/foo.ready/" ``` Such operations consist of two main parts: + * A "source" of data: `find -r .` * A series of "transformations" which transform the output of the source serially: `| grep ".pdf" | sed "s/foo/foo.ready/"` diff --git a/docs/debug-pause.md b/docs/debug-pause.md index 9dbf8dfd57c8..d62ef29c5261 100644 --- a/docs/debug-pause.md +++ b/docs/debug-pause.md @@ -1,4 +1,4 @@ -# Debug pause +# Debug Pause > v3.3 and after @@ -6,8 +6,9 @@ The `debug pause` feature makes it possible to pause individual workflow steps for debugging before, after or both and then release the steps from the paused state. Currently this feature is only supported when using the [Emissary Executor](workflow-executors.md#emissary-emissary) -In order to pause a container env variables are used: -- `ARGO_DEBUG_PAUSE_AFTER` - to pause a step after execution +In order to pause a container env variables are used: + +- `ARGO_DEBUG_PAUSE_AFTER` - to pause a step after execution - `ARGO_DEBUG_PAUSE_BEFORE` - to pause a step before execution Example workflow: @@ -32,9 +33,10 @@ In order to release a step from a pause state, marker files are used named `/var ## Example -1) Create a workflow where the debug pause env in set, in this example `ARGO_DEBUG_PAUSE_AFTER` will be set and thus the step will be paused after execution of the user code. +1) Create a workflow where the debug pause env is set, in this example `ARGO_DEBUG_PAUSE_AFTER` will be set and thus the step will be paused after execution of the user code. pause-after.yaml + ```yaml apiVersion: argoproj.io/v1alpha1 kind: Workflow @@ -51,19 +53,19 @@ spec: value: 'true' ``` -```bash +```bash argo submit -n argo --watch pause-after.yaml ``` -2) Create a shell in the container of interest of create a ephemeral container in the pod, in this example ephemeral containers are used. +Create a shell in the container of interest or create an ephemeral container in the pod, in this example ephemeral containers are used. 
-``` +```bash kubectl debug -n argo -it POD_NAME --image=busybox --target=main --share-processes ``` -In order to have access to the persistence volume used by the workflow step, [`--share-processes`](https://kubernetes.io/docs/tasks/configure-pod-container/share-process-namespace/) will have to be used. +In order to have access to the persistence volume used by the workflow step, [`--share-processes`](https://kubernetes.io/docs/tasks/configure-pod-container/share-process-namespace/) will have to be used. -The ephemeral container can be used to perform debugging operations. When debugging has been completed, create the marker file to allow the workflow step to continue. When using process name space sharing container filesystems are visible to other containers in the pod through the /proc/$pid/root link. +The ephemeral container can be used to perform debugging operations. When debugging has been completed, create the marker file to allow the workflow step to continue. When using process name space sharing container file systems are visible to other containers in the pod through the `/proc/$pid/root` link. ```bash touch /proc/1/root/run/argo/ctr/main/after diff --git a/docs/default-workflow-specs.md b/docs/default-workflow-specs.md index 6fbbc1c0e20b..e5a567143c18 100644 --- a/docs/default-workflow-specs.md +++ b/docs/default-workflow-specs.md @@ -1,7 +1,5 @@ # Default Workflow Spec -![GA](assets/ga.svg) - > v2.7 and after ## Introduction @@ -12,7 +10,7 @@ If a Workflow has a value that also has a default value set in the config map, t ## Setting Default Workflow Values Default Workflow values can be specified by adding them under the `workflowDefaults` key in the [`workflow-controller-configmap`](./workflow-controller-configmap.yaml). -Values can be added as the would under the `Workflow.spec` tag. +Values can be added as they would under the `Workflow.spec` tag. 
For example, to specify default values that would partially produce the following `Workflow`: diff --git a/docs/disaster-recovery.md b/docs/disaster-recovery.md index 72ae2a3c5a7f..7474a7395be0 100644 --- a/docs/disaster-recovery.md +++ b/docs/disaster-recovery.md @@ -4,15 +4,14 @@ We only store data in your Kubernetes cluster. You should consider backing this Exporting example: -``` -kubectl get wf,cwf,cwft,wftmpl -o yaml > backup.yaml +```bash +kubectl get wf,cwf,cwft,wftmpl -A -o yaml > backup.yaml ``` Importing example: -``` -kubectl apply -f backup.yaml - +```bash +kubectl apply -f backup.yaml ``` -You should also back-up any SQL persistence you use regularly with whatever tool is provided with it. \ No newline at end of file +You should also back-up any SQL persistence you use regularly with whatever tool is provided with it. diff --git a/docs/doc-changes.md b/docs/doc-changes.md new file mode 100644 index 000000000000..df6e369598c7 --- /dev/null +++ b/docs/doc-changes.md @@ -0,0 +1,38 @@ +# Documentation Changes + +Docs help our customers understand how to use workflows and fix their own problems. + +Doc changes are checked for spelling, broken links, and lint issues by CI. To check locally run `make docs`. + +* Explain when you would want to use a feature. +* Provide working examples. +* Use simple short sentences and avoid jargon. +* Format code using back-ticks to avoid it being reported spelling error. +* Avoid use title-case mid-sentence. E.g. instead of "the Workflow", write "the workflow". +* Headings should be title-case. E.g. instead of "and", write "And". + +## Running Locally + +To test/run locally: + +```bash +make docs-serve +``` + +## Tips + +Use a service like [Grammarly](https://www.grammarly.com) to check your grammar. + +Having your computer read text out loud is a way to catch problems, e.g.: + +* Word substitutions (i.e. the wrong word is used, but spelled. +correctly). +* Sentences that do not read correctly will sound wrong. 
+ +On Mac, to set-up: + +* Go to `System Preferences / Accessibility / Spoken Content`. +* Choose a System Voice (I like `Siri Voice 1`). +* Enable `Speak selection`. + +To hear text, select the text you want to hear, then press option+escape. diff --git a/docs/empty-dir.md b/docs/empty-dir.md index 748c748eca2f..95495ad6c9bd 100644 --- a/docs/empty-dir.md +++ b/docs/empty-dir.md @@ -2,12 +2,12 @@ While by default, the Docker and PNS [workflow executors](workflow-executors.md) can get output artifacts/parameters from the base layer (e.g. `/tmp`), neither the Kubelet nor the K8SAPI executors can. It is unlikely you can get output artifacts/parameters from the base layer if you run your workflow pods with a [security context](workflow-pod-security-context.md). -You can work-around this constraint by mounting volumes onto your pod. The easiest way to do this is to use as `emptyDir` volume. +You can work-around this constraint by mounting volumes onto your pod. The easiest way to do this is to use an `emptyDir` volume. -!!! Note +!!! Note This is only needed for output artifacts/parameters. 
Input artifacts/parameters are automatically mounted to an empty-dir if needed -This example shows how to mount an output volume: +This example shows how to mount an output volume: ```yaml apiVersion: argoproj.io/v1alpha1 diff --git a/docs/enhanced-depends-logic.md b/docs/enhanced-depends-logic.md index 9f360761271c..dc0e6e1e5490 100644 --- a/docs/enhanced-depends-logic.md +++ b/docs/enhanced-depends-logic.md @@ -1,7 +1,5 @@ # Enhanced Depends Logic -![GA](assets/ga.svg) - > v2.9 and after ## Introduction @@ -24,38 +22,39 @@ available task results is as follows: | `.Failed` | Task Failed | Task exited with a non-0 exit code | | `.Errored` | Task Errored | Task had an error other than a non-0 exit code | | `.Skipped` | Task Skipped | Task was skipped | +| `.Omitted` | Task Omitted | Task was omitted | | `.Daemoned` | Task is Daemoned and is not Pending | | For convenience, if an omitted task result is equivalent to `(task.Succeeded || task.Skipped || task.Daemoned)`. For example: -``` +```yaml depends: "task || task-2.Failed" ``` is equivalent to: -``` +```yaml depends: (task.Succeeded || task.Skipped || task.Daemoned) || task-2.Failed ``` Full boolean logic is also available. Operators include: - * `&&` - * `||` - * `!` +* `&&` +* `||` +* `!` Example: -``` +```yaml depends: "(task-2.Succeeded || task-2.Skipped) && !task-3.Failed" ``` -In the case that you're depending on a task that uses withItems, you can depend on -whether any of the item tasks are successful or all have failed using .AnySucceeded and .AllFailed, for example: +In the case that you're depending on a task that uses `withItems`, you can depend on +whether any of the item tasks are successful or all have failed using `.AnySucceeded` and `.AllFailed`, for example: -``` +```yaml depends: "task-1.AnySucceeded || task-2.AllFailed" ``` @@ -65,13 +64,13 @@ This feature is fully compatible with `dependencies` and conversion is easy. 
To convert simply join your `dependencies` with `&&`: -``` +```yaml dependencies: ["A", "B", "C"] ``` is equivalent to: -``` +```yaml depends: "A && B && C" ``` diff --git a/docs/environment-variables.md b/docs/environment-variables.md index 2ba3a62f864b..41a5dee8d617 100644 --- a/docs/environment-variables.md +++ b/docs/environment-variables.md @@ -1,63 +1,66 @@ # Environment Variables -This document outlines the set of environment variables that can be used to customize the behaviours at different -levels. These environment variables are typically added to test out experimental features and should not be needed by -most users. Note that these environment variables may be removed at any time. +This document outlines the set of environment variables that can be used to customize the behavior at different +levels. + +⚠️ Environment variables are typically added to test out experimental features and should not be used by +most users. Environment variables may be removed at any time. ## Controller -| Name | Type | Default | Description | -|------|------|---------|-------------| -| `ARGO_AGENT_TASK_WORKERS` | `int` | `16` | The number of task workers for the agent pod. | -| `ALL_POD_CHANGES_SIGNIFICANT` | `bool` | `false` | Whether to consider all pod changes as significant during pod reconciliation. | -| `ALWAYS_OFFLOAD_NODE_STATUS` | `bool` | `false` | Whether to always offload the node status. | -| `ARCHIVED_WORKFLOW_GC_PERIOD` | `time.Duration` | `24h` | The periodicity for GC of archived workflows. | -| `ARGO_PPROF` | `bool` | `false` | Enable pprof endpoints | -| `ARGO_PROGRESS_PATCH_TICK_DURATION` | `time.Duration` | `1m` | How often self reported progress is patched into the pod annotations which means how long it takes until the controller picks up the progress change. Set to 0 to disable self reporting progress. | -| `ARGO_PROGRESS_FILE_TICK_DURATION` | `time.Duration` | `3s` | How often the progress file is read by the executor. 
Set to 0 to disable self reporting progress. | -| `ARGO_REMOVE_PVC_PROTECTION_FINALIZER` | `bool` | `false` | Remove the `kubernetes.io/pvc-protection` finalizer from persistent volume claims (PVC) after marking PVCs created for the workflow for deletion, so deleted is not blocked until the pods are deleted. [#6629](https://github.com/argoproj/argo-workflows/issues/6629) | -| `ARGO_TRACE` | `string` | `"1"` | Whether to enable tracing statements in Argo components. | -| `ARGO_AGENT_PATCH_RATE` | `time.Duration` | `DEFAULT_REQUEUE_TIME` | Rate that the Argo Agent will patch the Workflow TaskSet. | -| `ARGO_AGENT_CPU_LIMIT` | `resource.Quantity` | `100m` | CPU resource limit for the agent. | -| `ARGO_AGENT_MEMORY_LIMIT` | `resource.Quantity` | `256m` | Memory resource limit for the agent. | -| `BUBBLE_ENTRY_TEMPLATE_ERR` | `bool` | `true` | Whether to bubble up template errors to workflow. | -| `CACHE_GC_PERIOD` | `time.Duration` | `0s` | How often to perform memoization cache GC, which is disabled by default and can be enabled by providing a non-zero duration. | -| `CACHE_GC_AFTER_NOT_HIT_DURATION` | `time.Duration` | `30s` | When a memoization cache has not been hit after this duration, it will be deleted. | -| `CRON_SYNC_PERIOD` | `time.Duration` | `10s` | How often to sync cron workflows. | -| `DEFAULT_REQUEUE_TIME` | `time.Duration` | `10s` | The requeue time for the rate limiter of the workflow queue. | -| `EXPRESSION_TEMPLATES` | `bool` | `true` | Escape hatch to disable expression templates. | -| `GRPC_MESSAGE_SIZE` | `string` | Use different GRPC Max message size for Argo server deployment (supporting huge workflows). | -| `GZIP_IMPLEMENTATION` | `string` | `"PGZip"` | The implementation of compression/decompression. Currently only "PGZip" and "GZip" are supported. | -| `INFORMER_WRITE_BACK` | `bool` | `true` | Whether to write back to informer instead of catching up. 
| -| `HEALTHZ_AGE` | `time.Duration` | `5m` | How old a un-reconciled workflow is to report unhealthy. | -| `INDEX_WORKFLOW_SEMAPHORE_KEYS` | `bool` | `true` | Whether or not to index semaphores. | -| `LEADER_ELECTION_IDENTITY` | `string` | Controller's `metadata.name` | The ID used for workflow controllers to elect a leader. | -| `LEADER_ELECTION_DISABLE` | `bool` | `false` | Whether leader election should be disabled. | -| `LEADER_ELECTION_LEASE_DURATION` | `time.Duration` | `15s` | The duration that non-leader candidates will wait to force acquire leadership. | -| `LEADER_ELECTION_RENEW_DEADLINE` | `time.Duration` | `10s` | The duration that the acting master will retry refreshing leadership before giving up. | -| `LEADER_ELECTION_RETRY_PERIOD` | `time.Duration` | `5s` | The duration that the leader election clients should wait between tries of actions. | -| `MAX_OPERATION_TIME` | `time.Duration` | `30s` | The maximum time a workflow operation is allowed to run for before requeuing the workflow onto the work queue. | -| `OFFLOAD_NODE_STATUS_TTL` | `time.Duration` | `5m` | The TTL to delete the offloaded node status. Currently only used for testing. | -| `POD_NAMES` | `string` | `v2` | Whether to have pod names contain the template name (v2) or be the node id (v1). | -| `RECENTLY_STARTED_POD_DURATION` | `time.Duration` | `10s` | The duration of a pod before the pod is considered to be recently started. | -| `RETRY_BACKOFF_DURATION` | `time.Duration` | `10ms` | The retry backoff duration when retrying API calls. | -| `RETRY_BACKOFF_FACTOR` | `float` | `2.0` | The retry backoff factor when retrying API calls. | -| `RETRY_BACKOFF_STEPS` | `int` | `5` | The retry backoff steps when retrying API calls. | -| `RETRY_HOST_NAME_LABEL_KEY` | `string` | `kubernetes.io/hostname` | The label key for host name used when retrying templates. | -| `TRANSIENT_ERROR_PATTERN` | `string` | `""` | The regular expression that represents additional patterns for transient errors. 
| -| `WF_DEL_PROPAGATION_POLICY` | `string` | `""` | The deletion propagation policy for workflows. | -| `WORKFLOW_GC_PERIOD` | `time.Duration` | `5m` | The periodicity for GC of workflows. | +| Name | Type | Default | Description | +|----------------------------------------|---------------------|---------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `ARGO_AGENT_TASK_WORKERS` | `int` | `16` | The number of task workers for the agent pod. | +| `ALL_POD_CHANGES_SIGNIFICANT` | `bool` | `false` | Whether to consider all pod changes as significant during pod reconciliation. | +| `ALWAYS_OFFLOAD_NODE_STATUS` | `bool` | `false` | Whether to always offload the node status. | +| `ARCHIVED_WORKFLOW_GC_PERIOD` | `time.Duration` | `24h` | The periodicity for GC of archived workflows. | +| `ARGO_PPROF` | `bool` | `false` | Enable `pprof` endpoints | +| `ARGO_PROGRESS_PATCH_TICK_DURATION` | `time.Duration` | `1m` | How often self reported progress is patched into the pod annotations which means how long it takes until the controller picks up the progress change. Set to 0 to disable self reporting progress. | +| `ARGO_PROGRESS_FILE_TICK_DURATION` | `time.Duration` | `3s` | How often the progress file is read by the executor. Set to 0 to disable self reporting progress. | +| `ARGO_REMOVE_PVC_PROTECTION_FINALIZER` | `bool` | `true` | Remove the `kubernetes.io/pvc-protection` finalizer from persistent volume claims (PVC) after marking PVCs created for the workflow for deletion, so deleted is not blocked until the pods are deleted. 
[#6629](https://github.com/argoproj/argo-workflows/issues/6629) | +| `ARGO_TRACE` | `string` | `` | Whether to enable tracing statements in Argo components. | +| `ARGO_AGENT_PATCH_RATE` | `time.Duration` | `DEFAULT_REQUEUE_TIME` | Rate that the Argo Agent will patch the workflow task-set. | +| `ARGO_AGENT_CPU_LIMIT` | `resource.Quantity` | `100m` | CPU resource limit for the agent. | +| `ARGO_AGENT_MEMORY_LIMIT` | `resource.Quantity` | `256m` | Memory resource limit for the agent. | +| `BUBBLE_ENTRY_TEMPLATE_ERR` | `bool` | `true` | Whether to bubble up template errors to workflow. | +| `CACHE_GC_PERIOD` | `time.Duration` | `0s` | How often to perform memoization cache GC, which is disabled by default and can be enabled by providing a non-zero duration. | +| `CACHE_GC_AFTER_NOT_HIT_DURATION` | `time.Duration` | `30s` | When a memoization cache has not been hit after this duration, it will be deleted. | +| `CRON_SYNC_PERIOD` | `time.Duration` | `10s` | How often to sync cron workflows. | +| `DEFAULT_REQUEUE_TIME` | `time.Duration` | `10s` | The re-queue time for the rate limiter of the workflow queue. | +| `EXPRESSION_TEMPLATES` | `bool` | `true` | Escape hatch to disable expression templates. | +| `EVENT_AGGREGATION_WITH_ANNOTATIONS` | `bool` | `false` | Whether event annotations will be used when aggregating events. | +| `GRPC_MESSAGE_SIZE` | `string` | Use different GRPC Max message size for Argo server deployment (supporting huge workflows). | +| `GZIP_IMPLEMENTATION` | `string` | `PGZip` | The implementation of compression/decompression. Currently only "`PGZip`" and "`GZip`" are supported. | +| `INFORMER_WRITE_BACK` | `bool` | `true` | Whether to write back to informer instead of catching up. | +| `HEALTHZ_AGE` | `time.Duration` | `5m` | How old a un-reconciled workflow is to report unhealthy. | +| `INDEX_WORKFLOW_SEMAPHORE_KEYS` | `bool` | `true` | Whether or not to index semaphores. 
| +| `LEADER_ELECTION_IDENTITY` | `string` | Controller's `metadata.name` | The ID used for workflow controllers to elect a leader. | +| `LEADER_ELECTION_DISABLE` | `bool` | `false` | Whether leader election should be disabled. | +| `LEADER_ELECTION_LEASE_DURATION` | `time.Duration` | `15s` | The duration that non-leader candidates will wait to force acquire leadership. | +| `LEADER_ELECTION_RENEW_DEADLINE` | `time.Duration` | `10s` | The duration that the acting master will retry refreshing leadership before giving up. | +| `LEADER_ELECTION_RETRY_PERIOD` | `time.Duration` | `5s` | The duration that the leader election clients should wait between tries of actions. | +| `MAX_OPERATION_TIME` | `time.Duration` | `30s` | The maximum time a workflow operation is allowed to run for before re-queuing the workflow onto the work queue. | +| `OFFLOAD_NODE_STATUS_TTL` | `time.Duration` | `5m` | The TTL to delete the offloaded node status. Currently only used for testing. | +| `POD_NAMES` | `string` | `v2` | Whether to have pod names contain the template name (v2) or be the node id (v1) - should be set the same for Argo Server. | +| `RECENTLY_STARTED_POD_DURATION` | `time.Duration` | `10s` | The duration of a pod before the pod is considered to be recently started. | +| `RETRY_BACKOFF_DURATION` | `time.Duration` | `10ms` | The retry back-off duration when retrying API calls. | +| `RETRY_BACKOFF_FACTOR` | `float` | `2.0` | The retry back-off factor when retrying API calls. | +| `RETRY_BACKOFF_STEPS` | `int` | `5` | The retry back-off steps when retrying API calls. | +| `RETRY_HOST_NAME_LABEL_KEY` | `string` | `kubernetes.io/hostname` | The label key for host name used when retrying templates. | +| `TRANSIENT_ERROR_PATTERN` | `string` | `""` | The regular expression that represents additional patterns for transient errors. | +| `WF_DEL_PROPAGATION_POLICY` | `string` | `""` | The deletion propagation policy for workflows. 
| +| `WORKFLOW_GC_PERIOD` | `time.Duration` | `5m` | The periodicity for GC of workflows. | CLI parameters of the `argo-server` and `workflow-controller` can be specified as environment variables with the `ARGO_` prefix. For example: -``` +```bash workflow-controller --managed-namespace=argo ``` Can be expressed as: -``` +```bash ARGO_MANAGED_NAMESPACE=argo workflow-controller ``` @@ -115,22 +118,18 @@ spec: ## Executor -| Name | Type | Default | Description | -|------|------|---------|-------------| -| `ARGO_CONTAINER_RUNTIME_EXECUTOR` | `string` | `"docker"` | The name of the container runtime executor. | -| `ARGO_KUBELET_PORT` | `int` | `10250` | The port to the Kubelet API. | -| `ARGO_KUBELET_INSECURE` | `bool` | `false` | Whether to disable the TLS verification. | -| `EXECUTOR_RETRY_BACKOFF_DURATION` | `time.Duration` | `1s` | The retry backoff duration when the workflow executor performs retries. | -| `EXECUTOR_RETRY_BACKOFF_FACTOR` | `float` | `1.6` | The retry backoff factor when the workflow executor performs retries. | -| `EXECUTOR_RETRY_BACKOFF_JITTER` | `float` | `0.5` | The retry backoff jitter when the workflow executor performs retries. | -| `EXECUTOR_RETRY_BACKOFF_STEPS` | `int` | `5` | The retry backoff steps when the workflow executor performs retries. | -| `PNS_PRIVILEGED` | `bool` | `false` | Whether to always set privileged on for PNS when PNS executor is used. | -| `REMOVE_LOCAL_ART_PATH` | `bool` | `false` | Whether to remove local artifacts. | -| `RESOURCE_STATE_CHECK_INTERVAL` | `time.Duration` | `5s` | The time interval between resource status checks against the specified success and failure conditions. | -| `WAIT_CONTAINER_STATUS_CHECK_INTERVAL` | `time.Duration` | `5s` | The time interval for wait container to check whether the containers have completed. 
| +| Name | Type | Default | Description | +|----------------------------------------|-----------------|---------|--------------------------------------------------------------------------------------------------------| +| `EXECUTOR_RETRY_BACKOFF_DURATION` | `time.Duration` | `1s` | The retry back-off duration when the workflow executor performs retries. | +| `EXECUTOR_RETRY_BACKOFF_FACTOR` | `float` | `1.6` | The retry back-off factor when the workflow executor performs retries. | +| `EXECUTOR_RETRY_BACKOFF_JITTER` | `float` | `0.5` | The retry back-off jitter when the workflow executor performs retries. | +| `EXECUTOR_RETRY_BACKOFF_STEPS` | `int` | `5` | The retry back-off steps when the workflow executor performs retries. | +| `REMOVE_LOCAL_ART_PATH` | `bool` | `false` | Whether to remove local artifacts. | +| `RESOURCE_STATE_CHECK_INTERVAL` | `time.Duration` | `5s` | The time interval between resource status checks against the specified success and failure conditions. | +| `WAIT_CONTAINER_STATUS_CHECK_INTERVAL` | `time.Duration` | `5s` | The time interval for wait container to check whether the containers have completed. | You can set the environment variables for executor by customizing executor container's environment variables in your -controller's configmap like the following: +controller's config-map like the following: ```yaml apiVersion: v1 @@ -147,8 +146,9 @@ data: ## Argo Server -| Name | Type | Default | Description | -|------|------|---------|-------------| -| `FIRST_TIME_USER_MODAL` | `bool` | `true` | Show this modal. | -| `FEEDBACK_MODAL` | `bool` | `true` | Show this modal. | -| `NEW_VERSION_MODAL` | `bool` | `true` | Show this modal. | +| Name | Type | Default | Description | +|-------------------------|----------|---------|------------------------------------------------------------------------------------------------------------------------------| +| `FIRST_TIME_USER_MODAL` | `bool` | `true` | Show this modal. 
| +| `FEEDBACK_MODAL` | `bool` | `true` | Show this modal. | +| `NEW_VERSION_MODAL` | `bool` | `true` | Show this modal. | +| `POD_NAMES` | `string` | `v2` | Whether to have pod names contain the template name (v2) or be the node id (v1) - should be set the same for Controller | diff --git a/docs/estimated-duration.md b/docs/estimated-duration.md index 439b85e24a94..0af7d1028fbc 100644 --- a/docs/estimated-duration.md +++ b/docs/estimated-duration.md @@ -5,14 +5,14 @@ When you run a workflow, the controller will try to estimate its duration. This is based on the most recently successful workflow submitted from the same workflow template, cluster workflow template or cron workflow. - -To get this data, the controller queries the Kubernetes API first (as this is faster) and then [workflow archive](workflow-archive.md) (if enabled). + +To get this data, the controller queries the Kubernetes API first (as this is faster) and then [workflow archive](workflow-archive.md) (if enabled). If you've used tools like Jenkins, you'll know that that estimates can be inaccurate: * A pod spent a long amount of time pending scheduling. -* The workflow is non-deterministic, e.g. it uses `when` to execute different paths. +* The workflow is non-deterministic, e.g. it uses `when` to execute different paths. * The workflow can vary is scale, e.g. sometimes it uses `withItems` and so sometimes run 100 nodes, sometimes a 1000. * If the pod runtimes are unpredictable. -* The workflow is parameterized, and different parameters affect its duration. - +* The workflow is parametrized, and different parameters affect its duration. + \ No newline at end of file diff --git a/docs/events.md b/docs/events.md index 288568853d72..edb0fd833d64 100644 --- a/docs/events.md +++ b/docs/events.md @@ -1,12 +1,10 @@ # Events -![GA](assets/ga.svg) - > v2.11 and after ## Overview -To support external webhooks, we have this endpoint `/api/v1/events/{namespace}/{discriminator}`. 
Events can be sent to that can be any JSON data. +To support external webhooks, we have this endpoint `/api/v1/events/{namespace}/{discriminator}`. Events sent to that can be any JSON data. These events can submit *workflow templates* or *cluster workflow templates*. @@ -14,7 +12,7 @@ You may also wish to read about [webhooks](webhooks.md). ## Authentication and Security -Clients wanting to send events to the endpoint need an [access token](access-token.md). +Clients wanting to send events to the endpoint need an [access token](access-token.md). It is only possible to submit workflow templates your access token has access to: [example role](https://raw.githubusercontent.com/argoproj/argo-workflows/master/manifests/quick-start/base/webhooks/submit-workflow-template-role.yaml). @@ -37,12 +35,12 @@ curl https://localhost:2746/api/v1/events/argo/my-discriminator \ The event endpoint will always return in under 10 seconds because the event will be queued and processed asynchronously. This means you will not be notified synchronously of failure. It will return a failure (503) if the event processing queue is full. !!! Warning "Processing Order" - Events may not always be processed in the order they are received. + Events may not always be processed in the order they are received. ## Workflow Template triggered by the event Before the binding between an event and a workflow template, you must create the workflow template that you want to trigger. -The following one takes in input the "message" parameter specified into the API call body, passed through the WorkflowEventBinding parameters section, and finally resolved here as the message of the whalesay image. +The following one takes in input the "message" parameter specified into the API call body, passed through the `WorkflowEventBinding` parameters section, and finally resolved here as the message of the `whalesay` image. 
```yaml apiVersion: argoproj.io/v1alpha1 @@ -66,8 +64,8 @@ spec: ## Submitting A Workflow From A Workflow Template -A workflow template will be submitted (i.e. workflow created from it) and that can be created using parameters from the event itself. -The following example will be triggered by an event with "message" in the payload. That message will be used as an argument for the created workflow. Note that the name of the metadata header "x-argo-e2e" is lowercase in the selector to match. Incoming header names are converted to lowercase. +A workflow template will be submitted (i.e. workflow created from it) and that can be created using parameters from the event itself. +The following example will be triggered by an event with "message" in the payload. That message will be used as an argument for the created workflow. Note that the name of the meta-data header "x-argo-e2e" is lowercase in the selector to match. Incoming header names are converted to lowercase. ```yaml apiVersion: argoproj.io/v1alpha1 @@ -87,8 +85,9 @@ spec: valueFrom: event: payload.message ``` -Please, notice that "workflowTemplateRef" refers to a template with the name "my-wf-tmple", this template has to be created before the triggering of the event. -After that you have to apply the above explained WorkflowEventBinding (in this example this is called event-template.yml) to realize the binding between Workflow Template and event (you can use kubectl to do that): + +Please, notice that `workflowTemplateRef` refers to a template with the name `my-wf-tmple`, this template has to be created before the triggering of the event. +After that you have to apply the above explained `WorkflowEventBinding` (in this example this is called `event-template.yml`) to realize the binding between Workflow Template and event (you can use `kubectl` to do that): ```bash kubectl apply -f event-template.yml @@ -106,39 +105,42 @@ curl $ARGO_SERVER/api/v1/events/argo/my-discriminator \ ``` !!! 
Warning "Malformed Expressions" - If the expression is malformed, this is logged. It is not visible in logs or the UI. + If the expression is malformed, this is logged. It is not visible in logs or the UI. + +### Customizing the Workflow Meta-Data -### Customizing the Workflow Metadata You can customize the name of the submitted workflow as well as add annotations and labels. This is done by adding a `metadata` object to the submit object. Normally the name of the workflow created from an event is simply the name of the -template with a timestamp appended. This can be customized by setting the name in the +template with a time-stamp appended. This can be customized by setting the name in the `metadata` object. Annotations and labels are added in the same fashion. All the values for the name, annotations and labels are treated as expressions (see -below for details). The `metadata` object is the same `metadata` type as on all +below for details). The `metadata` object is the same `metadata` type as on all Kubernetes resources and as such is parsed in the same manner. It is best to enclose the expression in single quotes to avoid any problems when submitting the event binding to Kubernetes. This is an example snippet of how to set the name, annotations and labels. This is based on the workflow binding from above, and the first event. + ```yaml - submit: - metadata: - annotations: - anAnnotation: 'event.payload.message' - name: 'event.payload.message + "-world"' - labels: - someLabel: '"literal string"' +submit: + metadata: + annotations: + anAnnotation: 'event.payload.message' + name: 'event.payload.message + "-world"' + labels: + someLabel: '"literal string"' ``` + This will result in the workflow being named "hello-world" instead of -`my-wf-tmple-`. There will be an extra label with the key "someLabel" and +`my-wf-tmple-`. There will be an extra label with the key `someLabel` and a value of "literal string". 
There will also be an extra annotation with the key -"anAnnotation" and a value of "hello" +`anAnnotation` and a value of "hello" Be careful when setting the name. If the name expression evaluates to that of a currently existing workflow, the new workflow will fail to submit. @@ -161,7 +163,7 @@ Because the endpoint accepts any JSON data, it is the user's responsibility to w The event environment contains: * `payload` the event payload. -* `metadata` event metadata, including HTTP headers. +* `metadata` event meta-data, including HTTP headers. * `discriminator` the discriminator from the URL. ### Payload @@ -170,17 +172,17 @@ This is the JSON payload of the event. Example: -``` +```text payload.repository.clone_url == "http://gihub.com/argoproj/argo" ``` -### MetaData +### Meta-Data -Metadata is data about the event, this includes **headers**: +Meta-data is data about the event, this includes **headers**: #### Headers -HTTP header names are lowercase and only include those that have `x-` as their prefix. Their values are lists, not single values. +HTTP header names are lowercase and only include those that have `x-` as their prefix. Their values are lists, not single values. * Wrong: `metadata["X-Github-Event"] == "push"` * Wrong: `metadata["x-github-event"] == "push"` @@ -191,32 +193,32 @@ HTTP header names are lowercase and only include those that have `x-` as their p Example: -``` +```text metadata["x-argo"] == ["yes"] ``` ### Discriminator -This is only for edge-cases where neither the payload, or metadata provide enough information to discriminate. Typically, it should be empty and ignored. +This is only for edge-cases where neither the payload, or meta-data provide enough information to discriminate. Typically, it should be empty and ignored. Example: -``` +```text discriminator == "my-discriminator" ``` ## High-Availability !!! Warning "Run Minimum 2 Replicas" - You MUST run a minimum of two Argo Server replicas if you do not want to lose events. 
+ You MUST run a minimum of two Argo Server replicas if you do not want to lose events. If you are processing large numbers of events, you may need to scale up the Argo Server to handle them. By default, a single Argo Server can be processing 64 events before the endpoint will start returning 503 errors. Vertically you can: - + * Increase the size of the event operation queue `--event-operation-queue-size` (good for temporary event bursts). * Increase the number of workers `--event-worker-count` (good for sustained numbers of events). Horizontally you can: - + * Run more Argo Servers (good for sustained numbers of events AND high-availability). diff --git a/docs/executor_plugins.md b/docs/executor_plugins.md index 7b3074509ae9..69acb5a729e4 100644 --- a/docs/executor_plugins.md +++ b/docs/executor_plugins.md @@ -48,7 +48,7 @@ We need the following: A template executor plugin services HTTP POST requests on `/api/v1/template.execute`: -```shell +```bash curl http://localhost:4355/api/v1/template.execute -d \ '{ "workflow": { @@ -174,14 +174,14 @@ spec: Build and install as follows: -```shell +```bash argo executor-plugin build . kubectl -n argo apply -f hello-executor-plugin-configmap.yaml ``` Check your controller logs: -``` +```text level=info msg="Executor plugin added" name=hello-controller-plugin ``` @@ -264,10 +264,10 @@ Transient errors are retried, all other errors are considered fatal. Fatal errors will result in failed steps. -### Requeue +### Re-Queue It might be the case that the plugin can't finish straight away. E.g. it starts a long running task. 
When that happens, -you return "Pending" or "Running" a and a requeue time: +you return "Pending" or "Running" and a re-queue time: ```json { @@ -285,7 +285,7 @@ In this example, the task will be re-queued and `template.execute` will be calle You can find the plugin's log in the agent pod's sidecar, e.g.: -```shell +```bash kubectl -n argo logs ${agentPodName} -c hello-executor-plugin ``` @@ -293,7 +293,7 @@ kubectl -n argo logs ${agentPodName} -c hello-executor-plugin Because plugins are just config maps, you can list them using `kubectl`: -```shell +```bash kubectl get cm -l workflows.argoproj.io/configmap-type=ExecutorPlugin ``` diff --git a/docs/executor_swagger.md b/docs/executor_swagger.md index c04999788199..a2a956793656 100644 --- a/docs/executor_swagger.md +++ b/docs/executor_swagger.md @@ -81,22 +81,21 @@ ownership management and SELinux relabeling. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| fsType | string| `string` | | | Filesystem type of the volume that you want to mount. +| fsType | string| `string` | | | fsType is the filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore TODO: how do we prevent errors in the filesystem from compromising the machine +optional | | -| partition | int32 (formatted integer)| `int32` | | | The partition in the volume that you want to mount. +| partition | int32 (formatted integer)| `int32` | | | partition is the partition in the volume that you want to mount. If omitted, the default is to mount by volume name. Examples: For volume /dev/sda1, you specify the partition as "1". Similarly, the volume partition for /dev/sda is "0" (or you can leave the property empty).
+optional | | -| readOnly | boolean| `bool` | | | Specify "true" to force and set the ReadOnly property in VolumeMounts to "true". -If omitted, the default is "false". +| readOnly | boolean| `bool` | | | readOnly value true will force the readOnly setting in VolumeMounts. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore +optional | | -| volumeID | string| `string` | | | Unique ID of the persistent disk resource in AWS (Amazon EBS volume). +| volumeID | string| `string` | | | volumeID is unique ID of the persistent disk resource in AWS (Amazon EBS volume). More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore | | @@ -201,7 +200,10 @@ It will marshall back to string - marshalling is not symmetric. | | |------|------|---------|:--------:| ------- |-------------|---------| | archive | [ArchiveStrategy](#archive-strategy)| `ArchiveStrategy` | | | | | | archiveLogs | boolean| `bool` | | | ArchiveLogs indicates if the container logs should be archived | | +| artifactGC | [ArtifactGC](#artifact-g-c)| `ArtifactGC` | | | | | | artifactory | [ArtifactoryArtifact](#artifactory-artifact)| `ArtifactoryArtifact` | | | | | +| azure | [AzureArtifact](#azure-artifact)| `AzureArtifact` | | | | | +| deleted | boolean| `bool` | | | Has this been deleted? | | | from | string| `string` | | | From allows an artifact to reference an artifact from a previous step | | | fromExpression | string| `string` | | | FromExpression, if defined, is evaluated to specify the value for the artifact | | | gcs | [GCSArtifact](#g-c-s-artifact)| `GCSArtifact` | | | | | @@ -223,6 +225,37 @@ set when loading input artifacts. 
| | +### ArtifactGC + + +> ArtifactGC describes how to delete artifacts from completed Workflows + + + + + + +**Properties** + +| Name | Type | Go type | Required | Default | Description | Example | +|------|------|---------|:--------:| ------- |-------------|---------| +| podMetadata | [Metadata](#metadata)| `Metadata` | | | | | +| serviceAccountName | string| `string` | | | ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion | | +| strategy | [ArtifactGCStrategy](#artifact-g-c-strategy)| `ArtifactGCStrategy` | | | | | + + + +### ArtifactGCStrategy + + + + +| Name | Type | Go type | Default | Description | Example | +|------|------|---------| ------- |-------------|---------| +| ArtifactGCStrategy | string| string | | | | + + + ### ArtifactLocation @@ -241,6 +274,7 @@ of a single workflow step, which the executor will use as a default location to |------|------|---------|:--------:| ------- |-------------|---------| | archiveLogs | boolean| `bool` | | | ArchiveLogs indicates if the container logs should be archived | | | artifactory | [ArtifactoryArtifact](#artifactory-artifact)| `ArtifactoryArtifact` | | | | | +| azure | [AzureArtifact](#azure-artifact)| `AzureArtifact` | | | | | | gcs | [GCSArtifact](#g-c-s-artifact)| `GCSArtifact` | | | | | | git | [GitArtifact](#git-artifact)| `GitArtifact` | | | | | | hdfs | [HDFSArtifact](#h-d-f-s-artifact)| `HDFSArtifact` | | | | | @@ -267,7 +301,10 @@ of a single workflow step, which the executor will use as a default location to |------|------|---------|:--------:| ------- |-------------|---------| | archive | [ArchiveStrategy](#archive-strategy)| `ArchiveStrategy` | | | | | | archiveLogs | boolean| `bool` | | | ArchiveLogs indicates if the container logs should be archived | | +| artifactGC | [ArtifactGC](#artifact-g-c)| `ArtifactGC` | | | | | | artifactory | [ArtifactoryArtifact](#artifactory-artifact)| `ArtifactoryArtifact` | | | | | +| azure 
| [AzureArtifact](#azure-artifact)| `AzureArtifact` | | | | | +| deleted | boolean| `bool` | | | Has this been deleted? | | | from | string| `string` | | | From allows an artifact to reference an artifact from a previous step | | | fromExpression | string| `string` | | | FromExpression, if defined, is evaluated to specify the value for the artifact | | | gcs | [GCSArtifact](#g-c-s-artifact)| `GCSArtifact` | | | | | @@ -316,25 +353,53 @@ set when loading input artifacts. | | [][Artifact](#artifact) +### AzureArtifact + + +> AzureArtifact is the location of an Azure Storage artifact + + + + + + +**Properties** + +| Name | Type | Go type | Required | Default | Description | Example | +|------|------|---------|:--------:| ------- |-------------|---------| +| accountKeySecret | [SecretKeySelector](#secret-key-selector)| `SecretKeySelector` | | | | | +| blob | string| `string` | | | Blob is the blob name (i.e., path) in the container where the artifact resides | | +| container | string| `string` | | | Container is the container where resources will be stored | | +| endpoint | string| `string` | | | Endpoint is the service url associated with an account. It is most likely "https://.blob.core.windows.net" | | +| useSDKCreds | boolean| `bool` | | | UseSDKCreds tells the driver to figure out credentials based on sdk defaults. | | + + + ### AzureDataDiskCachingMode +> +enum + + | Name | Type | Go type | Default | Description | Example | |------|------|---------| ------- |-------------|---------| -| AzureDataDiskCachingMode | string| string | | | | +| AzureDataDiskCachingMode | string| string | | +enum | | ### AzureDataDiskKind +> +enum + + | Name | Type | Go type | Default | Description | Example | |------|------|---------| ------- |-------------|---------| -| AzureDataDiskKind | string| string | | | | +| AzureDataDiskKind | string| string | | +enum | | @@ -350,14 +415,14 @@ set when loading input artifacts.
| | | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| | cachingMode | [AzureDataDiskCachingMode](#azure-data-disk-caching-mode)| `AzureDataDiskCachingMode` | | | | | -| diskName | string| `string` | | | The Name of the data disk in the blob storage | | -| diskURI | string| `string` | | | The URI the data disk in the blob storage | | -| fsType | string| `string` | | | Filesystem type to mount. +| diskName | string| `string` | | | diskName is the Name of the data disk in the blob storage | | +| diskURI | string| `string` | | | diskURI is the URI of data disk in the blob storage | | +| fsType | string| `string` | | | fsType is Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. +optional | | | kind | [AzureDataDiskKind](#azure-data-disk-kind)| `AzureDataDiskKind` | | | | | -| readOnly | boolean| `bool` | | | Defaults to false (read/write). ReadOnly here will force +| readOnly | boolean| `bool` | | | readOnly Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. +optional | | @@ -374,11 +439,11 @@ the ReadOnly setting in VolumeMounts. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| readOnly | boolean| `bool` | | | Defaults to false (read/write). ReadOnly here will force +| readOnly | boolean| `bool` | | | readOnly defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. 
+optional | | -| secretName | string| `string` | | | the name of secret that contains Azure Storage Account Name and Key | | -| shareName | string| `string` | | | Share Name | | +| secretName | string| `string` | | | secretName is the name of secret that contains Azure Storage Account Name and Key | | +| shareName | string| `string` | | | shareName is the azure share Name | | @@ -402,6 +467,25 @@ the ReadOnly setting in VolumeMounts. +### BasicAuth + + +> BasicAuth describes the secret selectors required for basic authentication + + + + + + +**Properties** + +| Name | Type | Go type | Required | Default | Description | Example | +|------|------|---------|:--------:| ------- |-------------|---------| +| passwordSecret | [SecretKeySelector](#secret-key-selector)| `SecretKeySelector` | | | | | +| usernameSecret | [SecretKeySelector](#secret-key-selector)| `SecretKeySelector` | | | | | + + + ### CSIVolumeSource @@ -416,17 +500,17 @@ the ReadOnly setting in VolumeMounts. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| driver | string| `string` | | | Driver is the name of the CSI driver that handles this volume. +| driver | string| `string` | | | driver is the name of the CSI driver that handles this volume. Consult with your admin for the correct name as registered in the cluster. | | -| fsType | string| `string` | | | Filesystem type to mount. Ex. "ext4", "xfs", "ntfs". +| fsType | string| `string` | | | fsType to mount. Ex. "ext4", "xfs", "ntfs". If not provided, the empty value is passed to the associated CSI driver which will determine the default filesystem to apply. +optional | | | nodePublishSecretRef | [LocalObjectReference](#local-object-reference)| `LocalObjectReference` | | | | | -| readOnly | boolean| `bool` | | | Specifies a read-only configuration for the volume. +| readOnly | boolean| `bool` | | | readOnly specifies a read-only configuration for the volume. 
Defaults to false (read/write). +optional | | -| volumeAttributes | map of string| `map[string]string` | | | VolumeAttributes stores driver-specific properties that are passed to the CSI +| volumeAttributes | map of string| `map[string]string` | | | volumeAttributes stores driver-specific properties that are passed to the CSI driver. Consult your driver's documentation for supported values. +optional | | @@ -497,19 +581,19 @@ Cephfs volumes do not support ownership management or SELinux relabeling. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| monitors | []string| `[]string` | | | Required: Monitors is a collection of Ceph monitors +| monitors | []string| `[]string` | | | monitors is Required: Monitors is a collection of Ceph monitors More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it | | -| path | string| `string` | | | Optional: Used as the mounted root, rather than the full Ceph tree, default is / +| path | string| `string` | | | path is Optional: Used as the mounted root, rather than the full Ceph tree, default is / +optional | | -| readOnly | boolean| `bool` | | | Optional: Defaults to false (read/write). ReadOnly here will force +| readOnly | boolean| `bool` | | | readOnly is Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. 
More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it +optional | | -| secretFile | string| `string` | | | Optional: SecretFile is the path to key ring for User, default is /etc/ceph/user.secret +| secretFile | string| `string` | | | secretFile is Optional: SecretFile is the path to key ring for User, default is /etc/ceph/user.secret More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it +optional | | | secretRef | [LocalObjectReference](#local-object-reference)| `LocalObjectReference` | | | | | -| user | string| `string` | | | Optional: User is the rados user name, default is admin +| user | string| `string` | | | user is optional: User is the rados user name, default is admin More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it +optional | | @@ -531,21 +615,40 @@ Cinder volumes support ownership management and SELinux relabeling. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| fsType | string| `string` | | | Filesystem type to mount. +| fsType | string| `string` | | | fsType is the filesystem type to mount. Must be a filesystem type supported by the host operating system. Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. More info: https://examples.k8s.io/mysql-cinder-pd/README.md +optional | | -| readOnly | boolean| `bool` | | | Optional: Defaults to false (read/write). ReadOnly here will force +| readOnly | boolean| `bool` | | | readOnly defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. More info: https://examples.k8s.io/mysql-cinder-pd/README.md +optional | | | secretRef | [LocalObjectReference](#local-object-reference)| `LocalObjectReference` | | | | | -| volumeID | string| `string` | | | volume id used to identify the volume in cinder. +| volumeID | string| `string` | | | volumeID used to identify the volume in cinder. 
More info: https://examples.k8s.io/mysql-cinder-pd/README.md | | +### ClientCertAuth + + +> ClientCertAuth holds necessary information for client authentication via certificates + + + + + + +**Properties** + +| Name | Type | Go type | Required | Default | Description | Example | +|------|------|---------|:--------:| ------- |-------------|---------| +| clientCertSecret | [SecretKeySelector](#secret-key-selector)| `SecretKeySelector` | | | | | +| clientKeySecret | [SecretKeySelector](#secret-key-selector)| `SecretKeySelector` | | | | | + + + ### ConfigMapEnvSource @@ -573,10 +676,13 @@ TODO: Add other useful fields. apiVersion, kind, uid? ### ConfigMapKeySelector +> +structType=atomic + + **Properties** | Name | Type | Go type | Required | Default | Description | Example | @@ -609,7 +715,7 @@ mode. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| items | [][KeyToPath](#key-to-path)| `[]*KeyToPath` | | | If unspecified, each key-value pair in the Data field of the referenced +| items | [][KeyToPath](#key-to-path)| `[]*KeyToPath` | | | items if unspecified, each key-value pair in the Data field of the referenced ConfigMap will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be @@ -621,7 +727,7 @@ relative and may not contain the '..' path or start with '..'. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names TODO: Add other useful fields. apiVersion, kind, uid? +optional | | -| optional | boolean| `bool` | | | Specify whether the ConfigMap or its keys must be defined +| optional | boolean| `bool` | | | optional specify whether the ConfigMap or its keys must be defined +optional | | @@ -643,7 +749,7 @@ ConfigMap volumes support ownership management and SELinux relabeling. 
| Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| defaultMode | int32 (formatted integer)| `int32` | | | Optional: mode bits used to set permissions on created files by default. +| defaultMode | int32 (formatted integer)| `int32` | | | defaultMode is optional: mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Defaults to 0644. @@ -651,7 +757,7 @@ Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set. +optional | | -| items | [][KeyToPath](#key-to-path)| `[]*KeyToPath` | | | If unspecified, each key-value pair in the Data field of the referenced +| items | [][KeyToPath](#key-to-path)| `[]*KeyToPath` | | | items if unspecified, each key-value pair in the Data field of the referenced ConfigMap will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be @@ -663,7 +769,7 @@ relative and may not contain the '..' path or start with '..'. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names TODO: Add other useful fields. apiVersion, kind, uid? +optional | | -| optional | boolean| `bool` | | | Specify whether the ConfigMap or its keys must be defined +| optional | boolean| `bool` | | | optional specify whether the ConfigMap or its keys must be defined +optional | | @@ -680,21 +786,21 @@ TODO: Add other useful fields. apiVersion, kind, uid? 
| Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| | args | []string| `[]string` | | | Arguments to the entrypoint. -The docker image's CMD is used if this is not provided. +The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable -cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax -can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, -regardless of whether the variable exists or not. -Cannot be updated. +cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced +to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will +produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless +of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell +optional | | | command | []string| `[]string` | | | Entrypoint array. Not executed within a shell. -The docker image's ENTRYPOINT is used if this is not provided. +The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable -cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax -can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, -regardless of whether the variable exists or not. -Cannot be updated. +cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced +to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will +produce the string literal "$(VAR_NAME)". 
Escaped references will never be expanded, regardless +of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell +optional | | | env | [][EnvVar](#env-var)| `[]*EnvVar` | | | List of environment variables to set in the container. @@ -709,7 +815,7 @@ sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. +optional | | -| image | string| `string` | | | Docker image name. +| image | string| `string` | | | Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. @@ -790,21 +896,21 @@ Cannot be updated. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| | args | []string| `[]string` | | | Arguments to the entrypoint. -The docker image's CMD is used if this is not provided. +The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable -cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax -can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, -regardless of whether the variable exists or not. -Cannot be updated. +cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced +to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will +produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless +of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell +optional | | | command | []string| `[]string` | | | Entrypoint array. Not executed within a shell. -The docker image's ENTRYPOINT is used if this is not provided. +The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable -cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax -can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, -regardless of whether the variable exists or not. -Cannot be updated. +cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced +to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will +produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless +of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell +optional | | | dependencies | []string| `[]string` | | | | | @@ -820,7 +926,7 @@ sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. +optional | | -| image | string| `string` | | | Docker image name. +| image | string| `string` | | | Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. @@ -917,6 +1023,23 @@ referred to by services. 
+### ContainerSetRetryStrategy + + + + + + +**Properties** + +| Name | Type | Go type | Required | Default | Description | Example | +|------|------|---------|:--------:| ------- |-------------|---------| +| duration | string| `string` | | | Duration is the time between each retry, examples values are "300ms", "1s" or "5m". +Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h". | | +| retries | [IntOrString](#int-or-string)| `IntOrString` | | | | | + + + ### ContainerSetTemplate @@ -929,6 +1052,7 @@ referred to by services. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| | containers | [][ContainerNode](#container-node)| `[]*ContainerNode` | | | | | +| retryStrategy | [ContainerSetRetryStrategy](#container-set-retry-strategy)| `ContainerSetRetryStrategy` | | | | | | volumeMounts | [][VolumeMount](#volume-mount)| `[]*VolumeMount` | | | | | @@ -1227,11 +1351,12 @@ can be used as map keys in json. |------|------|---------|:--------:| ------- |-------------|---------| | name | string| `string` | | | Name of the environment variable. Must be a C_IDENTIFIER. | | | value | string| `string` | | | Variable references $(VAR_NAME) are expanded -using the previous defined environment variables in the container and +using the previously defined environment variables in the container and any service environment variables. If a variable cannot be resolved, -the reference in the input string will be unchanged. The $(VAR_NAME) -syntax can be escaped with a double $$, ie: $$(VAR_NAME). Escaped -references will never be expanded, regardless of whether the variable +the reference in the input string will be unchanged. Double $$ are reduced +to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. +"$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)". +Escaped references will never be expanded, regardless of whether the variable exists or not. Defaults to "". 
+optional | | @@ -1354,19 +1479,19 @@ Fibre Channel volumes support ownership management and SELinux relabeling. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| fsType | string| `string` | | | Filesystem type to mount. +| fsType | string| `string` | | | fsType is the filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. TODO: how do we prevent errors in the filesystem from compromising the machine +optional | | -| lun | int32 (formatted integer)| `int32` | | | Optional: FC target lun number +| lun | int32 (formatted integer)| `int32` | | | lun is Optional: FC target lun number +optional | | -| readOnly | boolean| `bool` | | | Optional: Defaults to false (read/write). ReadOnly here will force +| readOnly | boolean| `bool` | | | readOnly is Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. +optional | | -| targetWWNs | []string| `[]string` | | | Optional: FC target worldwide names (WWNs) +| targetWWNs | []string| `[]string` | | | targetWWNs is Optional: FC target worldwide names (WWNs) +optional | | -| wwids | []string| `[]string` | | | Optional: FC volume world wide identifiers (wwids) +| wwids | []string| `[]string` | | | wwids Optional: FC volume world wide identifiers (wwids) Either wwids or combination of targetWWNs and lun must be set, but not both simultaneously. +optional | | @@ -1406,14 +1531,14 @@ provisioned/attached using an exec based plugin. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| driver | string| `string` | | | Driver is the name of the driver to use for this volume. | | -| fsType | string| `string` | | | Filesystem type to mount. 
+| driver | string| `string` | | | driver is the name of the driver to use for this volume. | | +| fsType | string| `string` | | | fsType is the filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. "ext4", "xfs", "ntfs". The default filesystem depends on FlexVolume script. +optional | | -| options | map of string| `map[string]string` | | | Optional: Extra command options if any. +| options | map of string| `map[string]string` | | | options is Optional: this field holds extra command options if any. +optional | | -| readOnly | boolean| `bool` | | | Optional: Defaults to false (read/write). ReadOnly here will force +| readOnly | boolean| `bool` | | | readOnly is Optional: defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. +optional | | | secretRef | [LocalObjectReference](#local-object-reference)| `LocalObjectReference` | | | | | @@ -1435,10 +1560,10 @@ Flocker volumes do not support ownership management or SELinux relabeling. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| datasetName | string| `string` | | | Name of the dataset stored as metadata -> name on the dataset for Flocker +| datasetName | string| `string` | | | datasetName is Name of the dataset stored as metadata -> name on the dataset for Flocker should be considered as deprecated +optional | | -| datasetUUID | string| `string` | | | UUID of the dataset. This is unique identifier of a Flocker dataset +| datasetUUID | string| `string` | | | datasetUUID is the UUID of the dataset. This is unique identifier of a Flocker dataset +optional | | @@ -1460,21 +1585,21 @@ PDs support ownership management and SELinux relabeling. 
| Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| fsType | string| `string` | | | Filesystem type of the volume that you want to mount. +| fsType | string| `string` | | | fsType is filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk TODO: how do we prevent errors in the filesystem from compromising the machine +optional | | -| partition | int32 (formatted integer)| `int32` | | | The partition in the volume that you want to mount. +| partition | int32 (formatted integer)| `int32` | | | partition is the partition in the volume that you want to mount. If omitted, the default is to mount by volume name. Examples: For volume /dev/sda1, you specify the partition as "1". Similarly, the volume partition for /dev/sda is "0" (or you can leave the property empty). More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk +optional | | -| pdName | string| `string` | | | Unique name of the PD resource in GCE. Used to identify the disk in GCE. +| pdName | string| `string` | | | pdName is unique name of the PD resource in GCE. Used to identify the disk in GCE. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk | | -| readOnly | boolean| `bool` | | | ReadOnly here will force the ReadOnly setting in VolumeMounts. +| readOnly | boolean| `bool` | | | readOnly here will force the ReadOnly setting in VolumeMounts. Defaults to false. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk +optional | | @@ -1501,6 +1626,27 @@ More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk +### GRPCAction + + + + + + +**Properties** + +| Name | Type | Go type | Required | Default | Description | Example | +|------|------|---------|:--------:| ------- |-------------|---------| +| port | int32 (formatted integer)| `int32` | | | Port number of the gRPC service. Number must be in the range 1 to 65535. | | +| service | string| `string` | | | Service is the name of the service to place in the gRPC HealthCheckRequest +(see https://github.com/grpc/grpc/blob/master/doc/health-checking.md). + +If this is not specified, the default behavior is defined by gRPC. ++optional ++default="" | | + + + ### Gauge @@ -1534,6 +1680,7 @@ More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| +| branch | string| `string` | | | Branch is the branch to fetch when `SingleBranch` is enabled | | | depth | uint64 (formatted integer)| `uint64` | | | Depth specifies clones/fetches should be shallow and include the given number of commits from the branch tip | | | disableSubmodules | boolean| `bool` | | | DisableSubmodules disables submodules during git clone | | @@ -1542,6 +1689,7 @@ number of commits from the branch tip | | | passwordSecret | [SecretKeySelector](#secret-key-selector)| `SecretKeySelector` | | | | | | repo | string| `string` | | | Repo is the git repository | | | revision | string| `string` | | | Revision is the git commit, tag, branch to checkout | | +| singleBranch | boolean| `bool` | | | SingleBranch enables single branch clone, using the `branch` parameter | | | sshPrivateKeySecret | [SecretKeySelector](#secret-key-selector)| `SecretKeySelector` | | | | | | usernameSecret | 
[SecretKeySelector](#secret-key-selector)| `SecretKeySelector` | | | | | @@ -1563,13 +1711,13 @@ into the Pod's container. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| directory | string| `string` | | | Target directory name. +| directory | string| `string` | | | directory is the target directory name. Must not contain or start with '..'. If '.' is supplied, the volume directory will be the git repository. Otherwise, if specified, the volume will contain the git repository in the subdirectory with the given name. +optional | | -| repository | string| `string` | | | Repository URL | | -| revision | string| `string` | | | Commit hash for the specified revision. +| repository | string| `string` | | | repository is the URL | | +| revision | string| `string` | | | revision is the commit hash for the specified revision. +optional | | @@ -1588,11 +1736,11 @@ the subdirectory with the given name. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| endpoints | string| `string` | | | EndpointsName is the endpoint name that details Glusterfs topology. +| endpoints | string| `string` | | | endpoints is the endpoint name that details Glusterfs topology. More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod | | -| path | string| `string` | | | Path is the Glusterfs volume path. +| path | string| `string` | | | path is the Glusterfs volume path. More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod | | -| readOnly | boolean| `bool` | | | ReadOnly here will force the Glusterfs volume to be mounted with read-only permissions. +| readOnly | boolean| `bool` | | | readOnly here will force the Glusterfs volume to be mounted with read-only permissions. Defaults to false. 
More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod +optional | | @@ -1642,8 +1790,11 @@ It must be set if keytab is used. | | | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| | body | string| `string` | | | Body is content of the HTTP Request | | -| headers | [][HTTPHeader](#http-header)| `[]*HTTPHeader` | | | Headers are an optional list of headers to send with HTTP requests | | +| bodyFrom | [HTTPBodySource](#http-body-source)| `HTTPBodySource` | | | | | +| headers | [HTTPHeaders](#http-headers)| `HTTPHeaders` | | | | | +| insecureSkipVerify | boolean| `bool` | | | InsecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client | | | method | string| `string` | | | Method is HTTP methods for HTTP Request | | +| successCondition | string| `string` | | | SuccessCondition is an expression if evaluated to true is considered successful | | | timeoutSeconds | int64 (formatted integer)| `int64` | | | TimeoutSeconds is request timeout for HTTP Request. Default is 30 seconds | | | url | string| `string` | | | URL of the HTTP Request | | @@ -1652,7 +1803,7 @@ It must be set if keytab is used. | | ### HTTPArtifact -> HTTPArtifact allows an file served on HTTP to be placed as an input artifact in a container +> HTTPArtifact allows a file served on HTTP to be placed as an input artifact in a container @@ -1663,11 +1814,44 @@ It must be set if keytab is used. 
| | | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| +| auth | [HTTPAuth](#http-auth)| `HTTPAuth` | | | | | | headers | [][Header](#header)| `[]*Header` | | | Headers are an optional list of headers to send with HTTP requests for artifacts | | | url | string| `string` | | | URL of the artifact | | +### HTTPAuth + + + + + + +**Properties** + +| Name | Type | Go type | Required | Default | Description | Example | +|------|------|---------|:--------:| ------- |-------------|---------| +| basicAuth | [BasicAuth](#basic-auth)| `BasicAuth` | | | | | +| clientCert | [ClientCertAuth](#client-cert-auth)| `ClientCertAuth` | | | | | +| oauth2 | [OAuth2Auth](#o-auth2-auth)| `OAuth2Auth` | | | | | + + + +### HTTPBodySource + + + + + + +**Properties** + +| Name | Type | Go type | Required | Default | Description | Example | +|------|------|---------|:--------:| ------- |-------------|---------| +| bytes | []uint8 (formatted integer)| `[]uint8` | | | | | + + + ### HTTPGetAction @@ -1723,26 +1907,12 @@ It must be set if keytab is used. | | -### Handler +### HTTPHeaders -> Handler defines a specific action that should be taken -TODO: pass structured data to these actions, and document that data here. - - - - -**Properties** - -| Name | Type | Go type | Required | Default | Description | Example | -|------|------|---------|:--------:| ------- |-------------|---------| -| exec | [ExecAction](#exec-action)| `ExecAction` | | | | | -| httpGet | [HTTPGetAction](#http-get-action)| `HTTPGetAction` | | | | | -| tcpSocket | [TCPSocketAction](#tcp-socket-action)| `TCPSocketAction` | | | | | - - +[][HTTPHeader](#http-header) ### Header @@ -1805,11 +1975,14 @@ pod's hosts file. 
### HostPathType +> +enum + + | Name | Type | Go type | Default | Description | Example | |------|------|---------| ------- |-------------|---------| -| HostPathType | string| string | | | | +| HostPathType | string| string | | +enum | | @@ -1827,7 +2000,7 @@ pod's hosts file. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| path | string| `string` | | | Path of the directory on the host. +| path | string| `string` | | | path of the directory on the host. If the path is a symlink, it will follow the link to the real path. More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath | | | type | [HostPathType](#host-path-type)| `HostPathType` | | | | | @@ -1849,33 +2022,33 @@ ISCSI volumes support ownership management and SELinux relabeling. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| chapAuthDiscovery | boolean| `bool` | | | whether support iSCSI Discovery CHAP authentication +| chapAuthDiscovery | boolean| `bool` | | | chapAuthDiscovery defines whether support iSCSI Discovery CHAP authentication +optional | | -| chapAuthSession | boolean| `bool` | | | whether support iSCSI Session CHAP authentication +| chapAuthSession | boolean| `bool` | | | chapAuthSession defines whether support iSCSI Session CHAP authentication +optional | | -| fsType | string| `string` | | | Filesystem type of the volume that you want to mount. +| fsType | string| `string` | | | fsType is the filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#iscsi TODO: how do we prevent errors in the filesystem from compromising the machine +optional | | -| initiatorName | string| `string` | | | Custom iSCSI Initiator Name. +| initiatorName | string| `string` | | | initiatorName is the custom iSCSI Initiator Name. If initiatorName is specified with iscsiInterface simultaneously, new iSCSI interface : will be created for the connection. +optional | | -| iqn | string| `string` | | | Target iSCSI Qualified Name. | | -| iscsiInterface | string| `string` | | | iSCSI Interface Name that uses an iSCSI transport. +| iqn | string| `string` | | | iqn is the target iSCSI Qualified Name. | | +| iscsiInterface | string| `string` | | | iscsiInterface is the interface Name that uses an iSCSI transport. Defaults to 'default' (tcp). +optional | | -| lun | int32 (formatted integer)| `int32` | | | iSCSI Target Lun number. | | -| portals | []string| `[]string` | | | iSCSI Target Portal List. The portal is either an IP or ip_addr:port if the port +| lun | int32 (formatted integer)| `int32` | | | lun represents iSCSI Target Lun number. | | +| portals | []string| `[]string` | | | portals is the iSCSI Target Portal List. The portal is either an IP or ip_addr:port if the port is other than default (typically TCP ports 860 and 3260). +optional | | -| readOnly | boolean| `bool` | | | ReadOnly here will force the ReadOnly setting in VolumeMounts. +| readOnly | boolean| `bool` | | | readOnly here will force the ReadOnly setting in VolumeMounts. Defaults to false. +optional | | | secretRef | [LocalObjectReference](#local-object-reference)| `LocalObjectReference` | | | | | -| targetPortal | string| `string` | | | iSCSI Target Portal. The Portal is either an IP or ip_addr:port if the port +| targetPortal | string| `string` | | | targetPortal is iSCSI Target Portal. The Portal is either an IP or ip_addr:port if the port is other than default (typically TCP ports 860 and 3260). 
| | @@ -1945,15 +2118,15 @@ is other than default (typically TCP ports 860 and 3260). | | | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| key | string| `string` | | | The key to project. | | -| mode | int32 (formatted integer)| `int32` | | | Optional: mode bits used to set permissions on this file. +| key | string| `string` | | | key is the key to project. | | +| mode | int32 (formatted integer)| `int32` | | | mode is Optional: mode bits used to set permissions on this file. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. If not specified, the volume defaultMode will be used. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set. +optional | | -| path | string| `string` | | | The relative path of the file to map the key to. +| path | string| `string` | | | path is the relative path of the file to map the key to. May not be an absolute path. May not contain the path element '..'. May not start with the string '..'. | | @@ -2040,8 +2213,29 @@ until the action is complete, unless the container process fails, in which case | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| postStart | [Handler](#handler)| `Handler` | | | | | -| preStop | [Handler](#handler)| `Handler` | | | | | +| postStart | [LifecycleHandler](#lifecycle-handler)| `LifecycleHandler` | | | | | +| preStop | [LifecycleHandler](#lifecycle-handler)| `LifecycleHandler` | | | | | + + + +### LifecycleHandler + + +> LifecycleHandler defines a specific action that should be taken in a lifecycle +hook. One and only one of the fields, except TCPSocket must be specified. 
+ + + + + + +**Properties** + +| Name | Type | Go type | Required | Default | Description | Example | +|------|------|---------|:--------:| ------- |-------------|---------| +| exec | [ExecAction](#exec-action)| `ExecAction` | | | | | +| httpGet | [HTTPGetAction](#http-get-action)| `HTTPGetAction` | | | | | +| tcpSocket | [TCPSocketAction](#tcp-socket-action)| `TCPSocketAction` | | | | | @@ -2057,7 +2251,10 @@ until the action is complete, unless the container process fails, in which case | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| | arguments | [Arguments](#arguments)| `Arguments` | | | | | -| template | string| `string` | | | | | +| expression | string| `string` | | | Expression is a condition expression for when a node will be retried. If it evaluates to false, the node will not +be retried and the retry strategy will be ignored | | +| template | string| `string` | | | Template is the name of the template to execute by the hook | | +| templateRef | [TemplateRef](#template-ref)| `TemplateRef` | | | | | @@ -2073,6 +2270,7 @@ until the action is complete, unless the container process fails, in which case > LocalObjectReference contains enough information to let you locate the referenced object inside the same namespace. ++structType=atomic @@ -2114,6 +2312,13 @@ There is currently only one possible value: "FieldsV1" | | | fieldsV1 | [FieldsV1](#fields-v1)| `FieldsV1` | | | | | | manager | string| `string` | | | Manager is an identifier of the workflow managing these fields. | | | operation | [ManagedFieldsOperationType](#managed-fields-operation-type)| `ManagedFieldsOperationType` | | | | | +| subresource | string| `string` | | | Subresource is the name of the subresource used to update that object, or +empty string if the object was updated through the main resource. The +value of this field is used to distinguish between managers, even if they +share the same name. 
For example, a status update will be distinct from a +regular update using the same manager name. +Note that the APIVersion field is not related to the Subresource field and +it always corresponds to the version of the main resource. | | | time | [Time](#time)| `Time` | | | | | @@ -2129,6 +2334,21 @@ There is currently only one possible value: "FieldsV1" | | +### ManifestFrom + + + + + + +**Properties** + +| Name | Type | Go type | Required | Default | Description | Example | +|------|------|---------|:--------:| ------- |-------------|---------| +| artifact | [Artifact](#artifact)| `Artifact` | | | | | + + + ### Memoize @@ -2209,11 +2429,14 @@ than the MaxAge, it will be ignored. | | ### MountPropagationMode +> +enum + + | Name | Type | Go type | Default | Description | Example | |------|------|---------| ------- |-------------|---------| -| MountPropagationMode | string| string | | | | +| MountPropagationMode | string| string | | +enum | | @@ -2249,14 +2472,13 @@ than the MaxAge, it will be ignored. | | | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| path | string| `string` | | | Path that is exported by the NFS server. +| path | string| `string` | | | path that is exported by the NFS server. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs | | -| readOnly | boolean| `bool` | | | ReadOnly here will force -the NFS export to be mounted with read-only permissions. +| readOnly | boolean| `bool` | | | readOnly here will force the NFS export to be mounted with read-only permissions. Defaults to false. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs +optional | | -| server | string| `string` | | | Server is the hostname or IP address of the NFS server. +| server | string| `string` | | | server is the hostname or IP address of the NFS server. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs | | @@ -2311,6 +2533,7 @@ node(s) with the highest sum are the most preferred. | message | string| `string` | | | | | | outputs | [Outputs](#outputs)| `Outputs` | | | | | | phase | [NodePhase](#node-phase)| `NodePhase` | | | | | +| progress | [Progress](#progress)| `Progress` | | | | | @@ -2320,6 +2543,7 @@ node(s) with the highest sum are the most preferred. > A node selector represents the union of the results of one or more label queries over a set of nodes; that is, it represents the OR of the selectors represented by the node selector terms. ++structType=atomic @@ -2339,6 +2563,7 @@ by the node selector terms. > A node selector operator is the set of operators that can be used in a node selector requirement. ++enum @@ -2346,7 +2571,8 @@ a node selector requirement. | Name | Type | Go type | Default | Description | Example | |------|------|---------| ------- |-------------|---------| | NodeSelectorOperator | string| string | | A node selector operator is the set of operators that can be used in -a node selector requirement. | | +a node selector requirement. ++enum | | @@ -2382,6 +2608,7 @@ This array is replaced during a strategic merge patch. > A null or empty node selector term matches no objects. The requirements of them are ANDed. The TopologySelectorTerm type implements a subset of the NodeSelectorTerm. ++structType=atomic @@ -2411,6 +2638,47 @@ save/load the directory appropriately. 
[interface{}](#interface) +### OAuth2Auth + + +> OAuth2Auth holds all information for client authentication via OAuth2 tokens + + + + + + +**Properties** + +| Name | Type | Go type | Required | Default | Description | Example | +|------|------|---------|:--------:| ------- |-------------|---------| +| clientIDSecret | [SecretKeySelector](#secret-key-selector)| `SecretKeySelector` | | | | | +| clientSecretSecret | [SecretKeySelector](#secret-key-selector)| `SecretKeySelector` | | | | | +| endpointParams | [][OAuth2EndpointParam](#o-auth2-endpoint-param)| `[]*OAuth2EndpointParam` | | | | | +| scopes | []string| `[]string` | | | | | +| tokenURLSecret | [SecretKeySelector](#secret-key-selector)| `SecretKeySelector` | | | | | + + + +### OAuth2EndpointParam + + +> EndpointParam is for requesting optional fields that should be sent in the oauth request + + + + + + +**Properties** + +| Name | Type | Go type | Required | Default | Description | Example | +|------|------|---------|:--------:| ------- |-------------|---------| +| key | string| `string` | | | Name is the header name | | +| value | string| `string` | | | Value is the literal value to use for the header | | + + + ### OSSArtifact @@ -2455,21 +2723,14 @@ save/load the directory appropriately. -### Object +### ObjectFieldSelector -> +kubebuilder:validation:Type=object +> +structType=atomic -[interface{}](#interface) - -### ObjectFieldSelector - - - - **Properties** @@ -2526,6 +2787,7 @@ save/load the directory appropriately. > OwnerReference contains enough information to let you identify an owning object. An owning object must be in the same namespace as the dependent, or be cluster-scoped, so there is no namespace field. ++structType=atomic @@ -2540,6 +2802,8 @@ be cluster-scoped, so there is no namespace field. | blockOwnerDeletion | boolean| `bool` | | | If true, AND if the owner has the "foregroundDeletion" finalizer, then the owner cannot be deleted from the key-value store until this reference is removed. 
+See https://kubernetes.io/docs/concepts/architecture/garbage-collection/#foreground-deletion +for how the garbage collector interacts with this field and enforces the foreground deletion. Defaults to false. To set this field, a user needs "delete" permission of the owner, otherwise 422 (Unprocessable Entity) will be returned. @@ -2579,6 +2843,7 @@ More info: http://kubernetes.io/docs/user-guide/identifiers#names | | | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| | default | [AnyString](#any-string)| `AnyString` | | | | | +| description | [AnyString](#any-string)| `AnyString` | | | | | | enum | [][AnyString](#any-string)| `[]AnyString` | | | Enum holds a list of string values to choose from, for the actual value of the parameter | | | globalName | string| `string` | | | GlobalName exports an output parameter to the global scope, making it available as '{{workflow.outputs.parameters.XXXX}} and in workflow.status.outputs.parameters | | @@ -2591,11 +2856,14 @@ More info: http://kubernetes.io/docs/user-guide/identifiers#names | | ### PersistentVolumeAccessMode +> +enum + + | Name | Type | Go type | Default | Description | Example | |------|------|---------| ------- |-------------|---------| -| PersistentVolumeAccessMode | string| string | | | | +| PersistentVolumeAccessMode | string| string | | +enum | | @@ -2614,17 +2882,18 @@ and allows a Source for provider-specific attributes | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| accessModes | [][PersistentVolumeAccessMode](#persistent-volume-access-mode)| `[]PersistentVolumeAccessMode` | | | AccessModes contains the desired access modes the volume should have. 
+| accessModes | [][PersistentVolumeAccessMode](#persistent-volume-access-mode)| `[]PersistentVolumeAccessMode` | | | accessModes contains the desired access modes the volume should have. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1 +optional | | | dataSource | [TypedLocalObjectReference](#typed-local-object-reference)| `TypedLocalObjectReference` | | | | | +| dataSourceRef | [TypedLocalObjectReference](#typed-local-object-reference)| `TypedLocalObjectReference` | | | | | | resources | [ResourceRequirements](#resource-requirements)| `ResourceRequirements` | | | | | | selector | [LabelSelector](#label-selector)| `LabelSelector` | | | | | -| storageClassName | string| `string` | | | Name of the StorageClass required by the claim. +| storageClassName | string| `string` | | | storageClassName is the name of the StorageClass required by the claim. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1 +optional | | | volumeMode | [PersistentVolumeMode](#persistent-volume-mode)| `PersistentVolumeMode` | | | | | -| volumeName | string| `string` | | | VolumeName is the binding reference to the PersistentVolume backing this claim. +| volumeName | string| `string` | | | volumeName is the binding reference to the PersistentVolume backing this claim. +optional | | @@ -2649,9 +2918,12 @@ set by external tools to store and retrieve arbitrary metadata. They are not queryable and should be preserved when modifying objects. More info: http://kubernetes.io/docs/user-guide/annotations +optional | | -| clusterName | string| `string` | | | The name of the cluster which the object belongs to. -This is used to distinguish resources with same name and namespace in different clusters. -This field is not set anywhere right now and apiserver is going to ignore it if set in create or update request. 
+| clusterName | string| `string` | | | Deprecated: ClusterName is a legacy field that was always cleared by +the system and never used; it will be removed completely in 1.25. + +The name in the go struct is changed to help clients detect +accidental use. + +optional | | | creationTimestamp | [Time](#time)| `Time` | | | | | | deletionGracePeriodSeconds | int64 (formatted integer)| `int64` | | | Number of seconds allowed for this object to gracefully terminate before @@ -2683,10 +2955,7 @@ The provided value has the same validation rules as the Name field, and may be truncated by the length of the suffix required to make the value unique on the server. -If this field is specified and the generated name exists, the server will -NOT return a 409 - instead, it will either return 201 Created or 500 with Reason -ServerTimeout indicating a unique name could not be found in the time allotted, and the client -should retry (optionally after the time indicated in the Retry-After header). +If this field is specified and the generated name exists, the server will return a 409. Applied only if Name is not specified. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#idempotency @@ -2742,13 +3011,7 @@ Read-only. Value must be treated as opaque by clients and . More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency +optional | | -| selfLink | string| `string` | | | SelfLink is a URL representing this object. -Populated by the system. -Read-only. - -DEPRECATED -Kubernetes will stop propagating this field in 1.20 release and the field is planned -to be removed in 1.21 release. +| selfLink | string| `string` | | | Deprecated: selfLink is a legacy read-only field that is no longer populated by the system. 
+optional | | | spec | [PersistentVolumeClaimSpec](#persistent-volume-claim-spec)| `PersistentVolumeClaimSpec` | | | | | | uid | [UID](#uid)| `UID` | | | | | @@ -2771,9 +3034,9 @@ type of volume that is owned by someone else (the system). | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| claimName | string| `string` | | | ClaimName is the name of a PersistentVolumeClaim in the same namespace as the pod using this volume. +| claimName | string| `string` | | | claimName is the name of a PersistentVolumeClaim in the same namespace as the pod using this volume. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims | | -| readOnly | boolean| `bool` | | | Will force the ReadOnly setting in VolumeMounts. +| readOnly | boolean| `bool` | | | readOnly Will force the ReadOnly setting in VolumeMounts. Default false. +optional | | @@ -2782,11 +3045,14 @@ Default false. ### PersistentVolumeMode +> +enum + + | Name | Type | Go type | Default | Description | Example | |------|------|---------| ------- |-------------|---------| -| PersistentVolumeMode | string| string | | | | +| PersistentVolumeMode | string| string | | +enum | | @@ -2801,12 +3067,22 @@ Default false. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| fsType | string| `string` | | | Filesystem type to mount. +| fsType | string| `string` | | | fsType is the filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. 
| | -| pdID | string| `string` | | | ID that identifies Photon Controller persistent disk | | +| pdID | string| `string` | | | pdID is the ID that identifies Photon Controller persistent disk | | + + + +### Plugin + + +> Plugin is an Object with exactly one key + + +[interface{}](#interface) ### PodAffinity @@ -2864,7 +3140,7 @@ a pod of the set of pods is running | namespaces | []string| `[]string` | | | namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. -null or empty namespaces list and null namespaceSelector means "this pod's namespace" +null or empty namespaces list and null namespaceSelector means "this pod's namespace". +optional | | | topologyKey | string| `string` | | | This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node @@ -2911,6 +3187,7 @@ podAffinityTerm are intersected, i.e. all terms must be satisfied. > PodFSGroupChangePolicy holds policies that will be used for applying fsGroup to a volume when volume is mounted. ++enum @@ -2918,7 +3195,8 @@ when volume is mounted. | Name | Type | Go type | Default | Description | Example | |------|------|---------| ------- |-------------|---------| | PodFSGroupChangePolicy | string| string | | PodFSGroupChangePolicy holds policies that will be used for applying fsGroup to a volume -when volume is mounted. | | +when volume is mounted. ++enum | | @@ -2946,6 +3224,7 @@ to be owned by the pod: 3. The permission bits are OR'd with rw-rw---- If unset, the Kubelet will not modify the ownership and permissions of any volume. +Note that this field cannot be set when spec.os.name is windows. 
+optional | | | fsGroupChangePolicy | [PodFSGroupChangePolicy](#pod-f-s-group-change-policy)| `PodFSGroupChangePolicy` | | | | | | runAsGroup | int64 (formatted integer)| `int64` | | | The GID to run the entrypoint of the container process. @@ -2953,6 +3232,7 @@ Uses runtime default if unset. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. +Note that this field cannot be set when spec.os.name is windows. +optional | | | runAsNonRoot | boolean| `bool` | | | Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it @@ -2966,15 +3246,18 @@ Defaults to user specified in image metadata if unspecified. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. +Note that this field cannot be set when spec.os.name is windows. +optional | | | seLinuxOptions | [SELinuxOptions](#s-e-linux-options)| `SELinuxOptions` | | | | | | seccompProfile | [SeccompProfile](#seccomp-profile)| `SeccompProfile` | | | | | | supplementalGroups | []int64 (formatted integer)| `[]int64` | | | A list of groups applied to the first process run in each container, in addition to the container's primary GID. If unspecified, no groups will be added to any container. +Note that this field cannot be set when spec.os.name is windows. +optional | | | sysctls | [][Sysctl](#sysctl)| `[]*Sysctl` | | | Sysctls hold a list of namespaced sysctls used for the pod. Pods with unsupported sysctls (by the container runtime) might fail to launch. +Note that this field cannot be set when spec.os.name is windows. 
+optional | | | windowsOptions | [WindowsSecurityContextOptions](#windows-security-context-options)| `WindowsSecurityContextOptions` | | | | | @@ -2991,13 +3274,13 @@ sysctls (by the container runtime) might fail to launch. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| fsType | string| `string` | | | FSType represents the filesystem type to mount +| fsType | string| `string` | | | fSType represents the filesystem type to mount Must be a filesystem type supported by the host operating system. Ex. "ext4", "xfs". Implicitly inferred to be "ext4" if unspecified. | | -| readOnly | boolean| `bool` | | | Defaults to false (read/write). ReadOnly here will force +| readOnly | boolean| `bool` | | | readOnly defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. +optional | | -| volumeID | string| `string` | | | VolumeID uniquely identifies a Portworx volume | | +| volumeID | string| `string` | | | volumeID uniquely identifies a Portworx volume | | @@ -3040,6 +3323,7 @@ alive or ready to receive traffic. | failureThreshold | int32 (formatted integer)| `int32` | | | Minimum consecutive failures for the probe to be considered failed after having succeeded. Defaults to 3. Minimum value is 1. +optional | | +| grpc | [GRPCAction](#g-rpc-action)| `GRPCAction` | | | | | | httpGet | [HTTPGetAction](#http-get-action)| `HTTPGetAction` | | | | | | initialDelaySeconds | int32 (formatted integer)| `int32` | | | Number of seconds after the container has started before liveness probes are initiated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes @@ -3059,7 +3343,8 @@ If this value is nil, the pod's terminationGracePeriodSeconds will be used. Othe value overrides the value provided by the pod spec. Value must be non-negative integer. 
The value zero indicates stop immediately via the kill signal (no opportunity to shut down). -This is an alpha field and requires enabling ProbeTerminationGracePeriod feature gate. +This is a beta field and requires enabling ProbeTerminationGracePeriod feature gate. +Minimum value is 1. spec.terminationGracePeriodSeconds is used if unset. +optional | | | timeoutSeconds | int32 (formatted integer)| `int32` | | | Number of seconds after which the probe times out. Defaults to 1 second. Minimum value is 1. @@ -3071,11 +3356,25 @@ More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#cont ### ProcMountType +> +enum + + + + +| Name | Type | Go type | Default | Description | Example | +|------|------|---------| ------- |-------------|---------| +| ProcMountType | string| string | | +enum | | + + + +### Progress + + | Name | Type | Go type | Default | Description | Example | |------|------|---------| ------- |-------------|---------| -| ProcMountType | string| string | | | | +| Progress | string| string | | | | @@ -3093,14 +3392,14 @@ More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#cont | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| defaultMode | int32 (formatted integer)| `int32` | | | Mode bits used to set permissions on created files by default. +| defaultMode | int32 (formatted integer)| `int32` | | | defaultMode are the mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set. 
+optional | | -| sources | [][VolumeProjection](#volume-projection)| `[]*VolumeProjection` | | | list of volume projections +| sources | [][VolumeProjection](#volume-projection)| `[]*VolumeProjection` | | | sources is the list of volume projections +optional | | @@ -3132,11 +3431,14 @@ mode, like fsGroup, and the result can be other mode bits set. ### Protocol +> +enum + + | Name | Type | Go type | Default | Description | Example | |------|------|---------| ------- |-------------|---------| -| Protocol | string| string | | | | +| Protocol | string| string | | +enum | | @@ -3144,13 +3446,15 @@ mode, like fsGroup, and the result can be other mode bits set. > PullPolicy describes a policy for if/when to pull a container image ++enum | Name | Type | Go type | Default | Description | Example | |------|------|---------| ------- |-------------|---------| -| PullPolicy | string| string | | PullPolicy describes a policy for if/when to pull a container image | | +| PullPolicy | string| string | | PullPolicy describes a policy for if/when to pull a container image ++enum | | @@ -3231,22 +3535,22 @@ cause implementors to also use a fixed point implementation. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| group | string| `string` | | | Group to map volume access to +| group | string| `string` | | | group to map volume access to Default is no group +optional | | -| readOnly | boolean| `bool` | | | ReadOnly here will force the Quobyte volume to be mounted with read-only permissions. +| readOnly | boolean| `bool` | | | readOnly here will force the Quobyte volume to be mounted with read-only permissions. Defaults to false. 
+optional | | -| registry | string| `string` | | | Registry represents a single or multiple Quobyte Registry services +| registry | string| `string` | | | registry represents a single or multiple Quobyte Registry services specified as a string as host:port pair (multiple entries are separated with commas) which acts as the central registry for volumes | | -| tenant | string| `string` | | | Tenant owning the given Quobyte volume in the Backend +| tenant | string| `string` | | | tenant owning the given Quobyte volume in the Backend Used with dynamically provisioned Quobyte volumes, value is set by the plugin +optional | | -| user | string| `string` | | | User to map volume access to +| user | string| `string` | | | user to map volume access to Defaults to serivceaccount user +optional | | -| volume | string| `string` | | | Volume is a string that references an already created Quobyte volume by name. | | +| volume | string| `string` | | | volume is a string that references an already created Quobyte volume by name. | | @@ -3264,30 +3568,30 @@ Defaults to serivceaccount user | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| fsType | string| `string` | | | Filesystem type of the volume that you want to mount. +| fsType | string| `string` | | | fsType is the filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#rbd TODO: how do we prevent errors in the filesystem from compromising the machine +optional | | -| image | string| `string` | | | The rados image name. +| image | string| `string` | | | image is the rados image name. 
More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it | | -| keyring | string| `string` | | | Keyring is the path to key ring for RBDUser. +| keyring | string| `string` | | | keyring is the path to key ring for RBDUser. Default is /etc/ceph/keyring. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it +optional | | -| monitors | []string| `[]string` | | | A collection of Ceph monitors. +| monitors | []string| `[]string` | | | monitors is a collection of Ceph monitors. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it | | -| pool | string| `string` | | | The rados pool name. +| pool | string| `string` | | | pool is the rados pool name. Default is rbd. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it +optional | | -| readOnly | boolean| `bool` | | | ReadOnly here will force the ReadOnly setting in VolumeMounts. +| readOnly | boolean| `bool` | | | readOnly here will force the ReadOnly setting in VolumeMounts. Defaults to false. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it +optional | | | secretRef | [LocalObjectReference](#local-object-reference)| `LocalObjectReference` | | | | | -| user | string| `string` | | | The rados user name. +| user | string| `string` | | | user is the rados user name. Default is admin. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it +optional | | @@ -3316,6 +3620,7 @@ More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it > ResourceFieldSelector represents container resources (cpu, memory) and their output format ++structType=atomic @@ -3380,6 +3685,7 @@ flags: [ "--validate=false" # disable resource validation ] | | | manifest | string| `string` | | | Manifest contains the kubernetes manifest | | +| manifestFrom | [ManifestFrom](#manifest-from)| `ManifestFrom` | | | | | | mergeStrategy | string| `string` | | | MergeStrategy is the strategy used to merge a patch. 
It defaults to "strategic" Must be one of: strategic, merge, json | | | setOwnerReference | boolean| `bool` | | | SetOwnerReference sets the reference to the workflow on the OwnerReference of generated resource. | | @@ -3535,27 +3841,27 @@ be retried and the retry strategy will be ignored | | | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| fsType | string| `string` | | | Filesystem type to mount. +| fsType | string| `string` | | | fsType is the filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. "ext4", "xfs", "ntfs". Default is "xfs". +optional | | -| gateway | string| `string` | | | The host address of the ScaleIO API Gateway. | | -| protectionDomain | string| `string` | | | The name of the ScaleIO Protection Domain for the configured storage. +| gateway | string| `string` | | | gateway is the host address of the ScaleIO API Gateway. | | +| protectionDomain | string| `string` | | | protectionDomain is the name of the ScaleIO Protection Domain for the configured storage. +optional | | -| readOnly | boolean| `bool` | | | Defaults to false (read/write). ReadOnly here will force +| readOnly | boolean| `bool` | | | readOnly Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. +optional | | | secretRef | [LocalObjectReference](#local-object-reference)| `LocalObjectReference` | | | | | -| sslEnabled | boolean| `bool` | | | Flag to enable/disable SSL communication with Gateway, default false +| sslEnabled | boolean| `bool` | | | sslEnabled Flag enable/disable SSL communication with Gateway, default false +optional | | -| storageMode | string| `string` | | | Indicates whether the storage for a volume should be ThickProvisioned or ThinProvisioned. +| storageMode | string| `string` | | | storageMode indicates whether the storage for a volume should be ThickProvisioned or ThinProvisioned. 
Default is ThinProvisioned. +optional | | -| storagePool | string| `string` | | | The ScaleIO Storage Pool associated with the protection domain. +| storagePool | string| `string` | | | storagePool is the ScaleIO Storage Pool associated with the protection domain. +optional | | -| system | string| `string` | | | The name of the storage system as configured in ScaleIO. | | -| volumeName | string| `string` | | | The name of a volume already created in the ScaleIO system +| system | string| `string` | | | system is the name of the storage system as configured in ScaleIO. | | +| volumeName | string| `string` | | | volumeName is the name of a volume already created in the ScaleIO system that is associated with this volume source. | | @@ -3575,21 +3881,21 @@ that is associated with this volume source. | | | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| | args | []string| `[]string` | | | Arguments to the entrypoint. -The docker image's CMD is used if this is not provided. +The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable -cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax -can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, -regardless of whether the variable exists or not. -Cannot be updated. +cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced +to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will +produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless +of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell +optional | | | command | []string| `[]string` | | | Entrypoint array. Not executed within a shell. -The docker image's ENTRYPOINT is used if this is not provided. +The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable -cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax -can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, -regardless of whether the variable exists or not. -Cannot be updated. +cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced +to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will +produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless +of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell +optional | | | env | [][EnvVar](#env-var)| `[]*EnvVar` | | | List of environment variables to set in the container. @@ -3604,7 +3910,7 @@ sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. +optional | | -| image | string| `string` | | | Docker image name. +| image | string| `string` | | | Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. @@ -3701,11 +4007,14 @@ Must only be set if type is "Localhost". 
### SeccompProfileType +> +enum + + | Name | Type | Go type | Default | Description | Example | |------|------|---------| ------- |-------------|---------| -| SeccompProfileType | string| string | | | | +| SeccompProfileType | string| string | | +enum | | @@ -3736,10 +4045,13 @@ TODO: Add other useful fields. apiVersion, kind, uid? ### SecretKeySelector +> +structType=atomic + + **Properties** | Name | Type | Go type | Required | Default | Description | Example | @@ -3771,7 +4083,7 @@ mode. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| items | [][KeyToPath](#key-to-path)| `[]*KeyToPath` | | | If unspecified, each key-value pair in the Data field of the referenced +| items | [][KeyToPath](#key-to-path)| `[]*KeyToPath` | | | items if unspecified, each key-value pair in the Data field of the referenced Secret will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be @@ -3783,7 +4095,7 @@ relative and may not contain the '..' path or start with '..'. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names TODO: Add other useful fields. apiVersion, kind, uid? +optional | | -| optional | boolean| `bool` | | | Specify whether the Secret or its key must be defined +| optional | boolean| `bool` | | | optional field specify whether the Secret or its key must be defined +optional | | @@ -3804,7 +4116,7 @@ Secret volumes support ownership management and SELinux relabeling. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| defaultMode | int32 (formatted integer)| `int32` | | | Optional: mode bits used to set permissions on created files by default. 
+| defaultMode | int32 (formatted integer)| `int32` | | | defaultMode is Optional: mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Defaults to 0644. @@ -3812,7 +4124,7 @@ Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set. +optional | | -| items | [][KeyToPath](#key-to-path)| `[]*KeyToPath` | | | If unspecified, each key-value pair in the Data field of the referenced +| items | [][KeyToPath](#key-to-path)| `[]*KeyToPath` | | | items If unspecified, each key-value pair in the Data field of the referenced Secret will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be @@ -3820,9 +4132,9 @@ present. If a key is specified which is not present in the Secret, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'. +optional | | -| optional | boolean| `bool` | | | Specify whether the Secret or its keys must be defined +| optional | boolean| `bool` | | | optional field specify whether the Secret or its keys must be defined +optional | | -| secretName | string| `string` | | | Name of the secret in the pod's namespace to use. +| secretName | string| `string` | | | secretName is the name of the secret in the pod's namespace to use. More info: https://kubernetes.io/docs/concepts/storage/volumes#secret +optional | | @@ -3849,20 +4161,24 @@ the no_new_privs flag will be set on the container process. 
AllowPrivilegeEscalation is true always when the container is: 1) run as Privileged 2) has CAP_SYS_ADMIN +Note that this field cannot be set when spec.os.name is windows. +optional | | | capabilities | [Capabilities](#capabilities)| `Capabilities` | | | | | | privileged | boolean| `bool` | | | Run container in privileged mode. Processes in privileged containers are essentially equivalent to root on the host. Defaults to false. +Note that this field cannot be set when spec.os.name is windows. +optional | | | procMount | [ProcMountType](#proc-mount-type)| `ProcMountType` | | | | | | readOnlyRootFilesystem | boolean| `bool` | | | Whether this container has a read-only root filesystem. Default is false. +Note that this field cannot be set when spec.os.name is windows. +optional | | | runAsGroup | int64 (formatted integer)| `int64` | | | The GID to run the entrypoint of the container process. Uses runtime default if unset. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. +Note that this field cannot be set when spec.os.name is windows. +optional | | | runAsNonRoot | boolean| `bool` | | | Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it @@ -3875,6 +4191,7 @@ PodSecurityContext, the value specified in SecurityContext takes precedence. Defaults to user specified in image metadata if unspecified. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. +Note that this field cannot be set when spec.os.name is windows. +optional | | | seLinuxOptions | [SELinuxOptions](#s-e-linux-options)| `SELinuxOptions` | | | | | | seccompProfile | [SeccompProfile](#seccomp-profile)| `SeccompProfile` | | | | | @@ -3938,19 +4255,19 @@ otherwise). 
| Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| audience | string| `string` | | | Audience is the intended audience of the token. A recipient of a token +| audience | string| `string` | | | audience is the intended audience of the token. A recipient of a token must identify itself with an identifier specified in the audience of the token, and otherwise should reject the token. The audience defaults to the identifier of the apiserver. +optional | | -| expirationSeconds | int64 (formatted integer)| `int64` | | | ExpirationSeconds is the requested duration of validity of the service +| expirationSeconds | int64 (formatted integer)| `int64` | | | expirationSeconds is the requested duration of validity of the service account token. As the token approaches expiration, the kubelet volume plugin will proactively rotate the service account token. The kubelet will start trying to rotate the token if the token is older than 80 percent of its time to live or if the token is older than 24 hours.Defaults to 1 hour and must be at least 10 minutes. +optional | | -| path | string| `string` | | | Path is the path relative to the mount point of the file to project the +| path | string| `string` | | | path is the path relative to the mount point of the file to project the token into. | | @@ -3977,17 +4294,17 @@ token into. | | | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| fsType | string| `string` | | | Filesystem type to mount. +| fsType | string| `string` | | | fsType is the filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. +optional | | -| readOnly | boolean| `bool` | | | Defaults to false (read/write). 
ReadOnly here will force +| readOnly | boolean| `bool` | | | readOnly defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. +optional | | | secretRef | [LocalObjectReference](#local-object-reference)| `LocalObjectReference` | | | | | -| volumeName | string| `string` | | | VolumeName is the human-readable name of the StorageOS volume. Volume +| volumeName | string| `string` | | | volumeName is the human-readable name of the StorageOS volume. Volume names are only unique within a namespace. | | -| volumeNamespace | string| `string` | | | VolumeNamespace specifies the scope of the volume within StorageOS. If no +| volumeNamespace | string| `string` | | | volumeNamespace specifies the scope of the volume within StorageOS. If no namespace is specified then the Pod's namespace will be used. This allows the Kubernetes name scoping to be mirrored within StorageOS for tighter integration. Set VolumeName to any name to override the default behaviour. @@ -4083,11 +4400,14 @@ Namespaces that do not pre-exist within StorageOS will be created. ### TaintEffect +> +enum + + | Name | Type | Go type | Default | Description | Example | |------|------|---------| ------- |-------------|---------| -| TaintEffect | string| string | | | | +| TaintEffect | string| string | | +enum | | @@ -4155,7 +4475,7 @@ run on the selected node(s). Overrides the selector set at the workflow level. | | parallelism | int64 (formatted integer)| `int64` | | | Parallelism limits the max total parallel pods that can execute at the same time within the boundaries of this template invocation. If additional steps/dag templates are invoked, the pods created by those templates will not be counted towards this total. | | -| plugin | [Object](#object)| `Object` | | | | | +| plugin | [Plugin](#plugin)| `Plugin` | | | | | | podSpecPatch | string| `string` | | | PodSpecPatch holds strategic merge patch to apply against the pod spec. 
Allows parameterization of container fields which are not strings (e.g. resource limits). | | | priority | int32 (formatted integer)| `int32` | | | Priority to apply to workflow pods. | | @@ -4176,7 +4496,7 @@ Sidecars are automatically killed when the main container completes | steps | [][ParallelSteps](#parallel-steps)| `[]ParallelSteps` | | | Steps define a series of sequential/parallel workflow steps | | | suspend | [SuspendTemplate](#suspend-template)| `SuspendTemplate` | | | | | | synchronization | [Synchronization](#synchronization)| `Synchronization` | | | | | -| timeout | string| `string` | | | Timout allows to set the total node execution timeout duration counting from the node's start time. +| timeout | string| `string` | | | Timeout allows to set the total node execution timeout duration counting from the node's start time. This duration also includes time in which the node spends in Pending state. This duration may not be applied to Step or DAG templates. | | | tolerations | [][Toleration](#toleration)| `[]*Toleration` | | | Tolerations to apply to workflow pods. +patchStrategy=merge @@ -4207,11 +4527,14 @@ This duration also includes time in which the node spends in Pending state. 
This ### TerminationMessagePolicy +> +enum + + | Name | Type | Go type | Default | Description | Example | |------|------|---------| ------- |-------------|---------| -| TerminationMessagePolicy | string| string | | | | +| TerminationMessagePolicy | string| string | | +enum | | @@ -4261,11 +4584,14 @@ If the operator is Exists, the value should be empty, otherwise just a regular s ### TolerationOperator +> +enum + + | Name | Type | Go type | Default | Description | Example | |------|------|---------| ------- |-------------|---------| -| TolerationOperator | string| string | | | | +| TolerationOperator | string| string | | +enum | | @@ -4307,6 +4633,7 @@ If the operator is Exists, the value should be empty, otherwise just a regular s > TypedLocalObjectReference contains enough information to let you locate the typed referenced object inside the same namespace. ++structType=atomic @@ -4348,13 +4675,15 @@ intent and helps make sure that UIDs and names do not get conflated. | | > URIScheme identifies the scheme used for connection to a host for Get actions ++enum | Name | Type | Go type | Default | Description | Example | |------|------|---------| ------- |-------------|---------| -| URIScheme | string| string | | URIScheme identifies the scheme used for connection to a host for Get actions | | +| URIScheme | string| string | | URIScheme identifies the scheme used for connection to a host for Get actions ++enum | | @@ -4370,21 +4699,21 @@ intent and helps make sure that UIDs and names do not get conflated. | | | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| | args | []string| `[]string` | | | Arguments to the entrypoint. -The docker image's CMD is used if this is not provided. +The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. 
If a variable -cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax -can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, -regardless of whether the variable exists or not. -Cannot be updated. +cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced +to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will +produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless +of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell +optional | | | command | []string| `[]string` | | | Entrypoint array. Not executed within a shell. -The docker image's ENTRYPOINT is used if this is not provided. +The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable -cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax -can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, -regardless of whether the variable exists or not. -Cannot be updated. +cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced +to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will +produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless +of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell +optional | | | env | [][EnvVar](#env-var)| `[]*EnvVar` | | | List of environment variables to set in the container. 
@@ -4399,7 +4728,7 @@ sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. +optional | | -| image | string| `string` | | | Docker image name. +| image | string| `string` | | | Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. @@ -4528,7 +4857,7 @@ Cannot be updated. | glusterfs | [GlusterfsVolumeSource](#glusterfs-volume-source)| `GlusterfsVolumeSource` | | | | | | hostPath | [HostPathVolumeSource](#host-path-volume-source)| `HostPathVolumeSource` | | | | | | iscsi | [ISCSIVolumeSource](#i-s-c-s-i-volume-source)| `ISCSIVolumeSource` | | | | | -| name | string| `string` | | | Volume's name. +| name | string| `string` | | | name of the volume. Must be a DNS_LABEL and unique within the pod. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names | | | nfs | [NFSVolumeSource](#n-f-s-volume-source)| `NFSVolumeSource` | | | | | @@ -4622,15 +4951,15 @@ SubPathExpr and SubPath are mutually exclusive. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| fsType | string| `string` | | | Filesystem type to mount. +| fsType | string| `string` | | | fsType is filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. +optional | | -| storagePolicyID | string| `string` | | | Storage Policy Based Management (SPBM) profile ID associated with the StoragePolicyName. +| storagePolicyID | string| `string` | | | storagePolicyID is the storage Policy Based Management (SPBM) profile ID associated with the StoragePolicyName. 
+optional | | -| storagePolicyName | string| `string` | | | Storage Policy Based Management (SPBM) profile name. +| storagePolicyName | string| `string` | | | storagePolicyName is the storage Policy Based Management (SPBM) profile name. +optional | | -| volumePath | string| `string` | | | Path that identifies vSphere volume vmdk | | +| volumePath | string| `string` | | | volumePath is the path that identifies vSphere volume vmdk | | @@ -4671,6 +5000,14 @@ GMSA credential spec named by the GMSACredentialSpecName field. +optional | | | gmsaCredentialSpecName | string| `string` | | | GMSACredentialSpecName is the name of the GMSA credential spec to use. +optional | | +| hostProcess | boolean| `bool` | | | HostProcess determines if a container should be run as a 'Host Process' container. +This field is alpha-level and will only be honored by components that enable the +WindowsHostProcessContainers feature flag. Setting this field without the feature +flag will result in errors when validating the Pod. All of a Pod's containers must +have the same effective HostProcess value (it is not allowed to have a mix of HostProcess +containers and non-HostProcess containers). In addition, if HostProcess is true +then HostNetwork must also be set to true. ++optional | | | runAsUserName | string| `string` | | | The UserName in Windows to run the entrypoint of the container process. Defaults to the user specified in image metadata if unspecified. May also be set in PodSecurityContext. If set in both SecurityContext and diff --git a/docs/faq.md b/docs/faq.md index 2b09d5f42e1b..c4f09490bee6 100644 --- a/docs/faq.md +++ b/docs/faq.md @@ -1,16 +1,16 @@ # FAQ -> "token not valid for running mode", "any bearer token is able to login in the UI or use the API" +## "token not valid for running mode", "any bearer token is able to login in the UI or use the API" You've not configured Argo Server authentication correctly. If you want SSO, try running with `--auth-mode=sso`. 
[Learn more about the Argo Server set-up](argo-server.md) -> Argo Server return EOF error +## Argo Server return EOF error Since v3.0 the Argo Server listens for HTTPS requests, rather than HTTP. Try changing your URL to HTTPS, or start Argo Server using `--secure=false`. -> My workflow hangs +## My workflow hangs Check your `wait` container logs: @@ -18,13 +18,13 @@ Is there an RBAC error? [Learn more about workflow RBAC](workflow-rbac.md) -> Return "unknown (get pods)" error +## Return "unknown (get pods)" error You're probably getting a permission denied error because your RBAC is not configured. [Learn more about workflow RBAC](workflow-rbac.md) and [even more details](https://blog.argoproj.io/demystifying-argo-workflowss-kubernetes-rbac-7a1406d446fc) -> There is an error about /var/run/docker.sock. +## There is an error about `/var/run/docker.sock` Try using a different container runtime executor. diff --git a/docs/fields.md b/docs/fields.md index 80a111d33ceb..3abd9071afba 100644 --- a/docs/fields.md +++ b/docs/fields.md @@ -18,6 +18,8 @@ Workflow is the definition of a workflow resource - [`artifact-disable-archive.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-disable-archive.yaml) +- [`artifact-gc-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-gc-workflow.yaml) + - [`artifact-passing-subpath.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing-subpath.yaml) - [`artifact-passing.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing.yaml) @@ -168,6 +170,8 @@ Workflow is the definition of a workflow resource - [`init-container.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/init-container.yaml) +- [`input-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-azure.yaml) + - 
[`input-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-gcs.yaml) - [`input-artifact-git.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-git.yaml) @@ -224,6 +228,8 @@ Workflow is the definition of a workflow resource - [`node-selector.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/node-selector.yaml) +- [`output-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-azure.yaml) + - [`output-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-gcs.yaml) - [`output-artifact-s3.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-s3.yaml) @@ -330,6 +336,8 @@ Workflow is the definition of a workflow resource - [`volumes-pvc.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/volumes-pvc.yaml) +- [`webhdfs-input-output-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml) + - [`work-avoidance.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/work-avoidance.yaml) - [`workflow-of-workflows.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/workflow-of-workflows.yaml) @@ -390,8 +398,12 @@ WorkflowTemplate is the definition of a workflow template resource Examples (click to open)
+- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) + - [`buildkit-template.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/buildkit-template.yaml) +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - [`cron-backfill.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/cron-backfill.yaml) - [`dag-inline-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/dag-inline-workflowtemplate.yaml) @@ -427,6 +439,8 @@ WorkflowSpec is the specification of a Workflow. - [`artifact-disable-archive.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-disable-archive.yaml) +- [`artifact-gc-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-gc-workflow.yaml) + - [`artifact-passing-subpath.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing-subpath.yaml) - [`artifact-passing.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing.yaml) @@ -437,10 +451,14 @@ WorkflowSpec is the specification of a Workflow. 
- [`artifactory-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifactory-artifact.yaml) +- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) + - [`buildkit-template.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/buildkit-template.yaml) - [`ci-output-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-output-artifact.yaml) +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - [`ci.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci.yaml) - [`cluster-wftmpl-dag.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/cluster-workflow-template/cluster-wftmpl-dag.yaml) @@ -587,6 +605,8 @@ WorkflowSpec is the specification of a Workflow. - [`init-container.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/init-container.yaml) +- [`input-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-azure.yaml) + - [`input-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-gcs.yaml) - [`input-artifact-git.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-git.yaml) @@ -633,6 +653,8 @@ WorkflowSpec is the specification of a Workflow. 
- [`node-selector.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/node-selector.yaml) +- [`output-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-azure.yaml) + - [`output-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-gcs.yaml) - [`output-artifact-s3.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-s3.yaml) @@ -735,6 +757,8 @@ WorkflowSpec is the specification of a Workflow. - [`volumes-pvc.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/volumes-pvc.yaml) +- [`webhdfs-input-output-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml) + - [`work-avoidance.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/work-avoidance.yaml) - [`event-consumer-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/workflow-event-binding/event-consumer-workflowtemplate.yaml) @@ -765,6 +789,7 @@ WorkflowSpec is the specification of a Workflow. |`affinity`|[`Affinity`](#affinity)|Affinity sets the scheduling constraints for all pods in the io.argoproj.workflow.v1alpha1. Can be overridden by an affinity specified in the template| |`archiveLogs`|`boolean`|ArchiveLogs indicates if the container logs should be archived| |`arguments`|[`Arguments`](#arguments)|Arguments contain the parameters and artifacts sent to the workflow entrypoint Parameters are referencable globally using the 'workflow' variable prefix. e.g. 
{{io.argoproj.workflow.v1alpha1.parameters.myparam}}| +|`artifactGC`|[`ArtifactGC`](#artifactgc)|ArtifactGC describes the strategy to use when deleting artifacts from completed or deleted workflows (applies to all output Artifacts unless Artifact.ArtifactGC is specified, which overrides this)| |`artifactRepositoryRef`|[`ArtifactRepositoryRef`](#artifactrepositoryref)|ArtifactRepositoryRef specifies the configMap name and key containing the artifact repository config.| |`automountServiceAccountToken`|`boolean`|AutomountServiceAccountToken indicates whether a service account token should be automatically mounted in pods. ServiceAccountName of ExecutorConfig must be specified if this value is false.| |`dnsConfig`|[`PodDNSConfig`](#poddnsconfig)|PodDNSConfig defines the DNS parameters of a pod in addition to those generated from DNSPolicy.| @@ -780,9 +805,9 @@ WorkflowSpec is the specification of a Workflow. |`onExit`|`string`|OnExit is a template reference which is invoked at the end of the workflow, irrespective of the success, failure, or error of the primary io.argoproj.workflow.v1alpha1.| |`parallelism`|`integer`|Parallelism limits the max total parallel pods that can execute at the same time in a workflow| |`podDisruptionBudget`|[`PodDisruptionBudgetSpec`](#poddisruptionbudgetspec)|PodDisruptionBudget holds the number of concurrent disruptions that you allow for Workflow's Pods. Controller will automatically add the selector with workflow name, if selector is empty. 
Optional: Defaults to empty.| -|`podGC`|[`PodGC`](#podgc)|PodGC describes the strategy to use when to deleting completed pods| +|`podGC`|[`PodGC`](#podgc)|PodGC describes the strategy to use when deleting completed pods| |`podMetadata`|[`Metadata`](#metadata)|PodMetadata defines additional metadata that should be applied to workflow pods| -|`podPriority`|`integer`|Priority to apply to workflow pods.| +|~`podPriority`~|~`integer`~|~Priority to apply to workflow pods.~ DEPRECATED: Use PodPriorityClassName instead.| |`podPriorityClassName`|`string`|PriorityClassName to apply to workflow pods.| |`podSpecPatch`|`string`|PodSpecPatch holds strategic merge patch to apply against the pod spec. Allows parameterization of container fields which are not strings (e.g. resource limits).| |`priority`|`integer`|Priority is used if controller is configured to process limited number of workflows in parallel. Workflows with higher priority are processed first.| @@ -797,10 +822,10 @@ WorkflowSpec is the specification of a Workflow. |`templates`|`Array<`[`Template`](#template)`>`|Templates is a list of workflow templates used in a workflow| |`tolerations`|`Array<`[`Toleration`](#toleration)`>`|Tolerations to apply to workflow pods.| |`ttlStrategy`|[`TTLStrategy`](#ttlstrategy)|TTLStrategy limits the lifetime of a Workflow that has finished execution depending on if it Succeeded or Failed. If this struct is set, once the Workflow finishes, it will be deleted after the time to live expires. 
If this field is unset, the controller config map will hold the default values.| -|`volumeClaimGC`|[`VolumeClaimGC`](#volumeclaimgc)|VolumeClaimGC describes the strategy to use when to deleting volumes from completed workflows| +|`volumeClaimGC`|[`VolumeClaimGC`](#volumeclaimgc)|VolumeClaimGC describes the strategy to use when deleting volumes from completed workflows| |`volumeClaimTemplates`|`Array<`[`PersistentVolumeClaim`](#persistentvolumeclaim)`>`|VolumeClaimTemplates is a list of claims that containers are allowed to reference. The Workflow controller will create the claims at the beginning of the workflow and delete the claims upon completion of the workflow| |`volumes`|`Array<`[`Volume`](#volume)`>`|Volumes is a list of volumes that can be mounted by containers in a io.argoproj.workflow.v1alpha1.| -|`workflowMetadata`|[`WorkflowMetadata`](#workflowmetadata)|WorkflowMetadata contains some metadata of the workflow to be refer| +|`workflowMetadata`|[`WorkflowMetadata`](#workflowmetadata)|WorkflowMetadata contains some metadata of the workflow to refer to| |`workflowTemplateRef`|[`WorkflowTemplateRef`](#workflowtemplateref)|WorkflowTemplateRef holds a reference to a WorkflowTemplate for execution| ## WorkflowStatus @@ -810,6 +835,7 @@ WorkflowStatus contains overall status information about a workflow ### Fields | Field Name | Field Type | Description | |:----------:|:----------:|---------------| +|`artifactGCStatus`|[`ArtGCStatus`](#artgcstatus)|ArtifactGCStatus maintains the status of Artifact Garbage Collection| |`artifactRepositoryRef`|[`ArtifactRepositoryRefStatus`](#artifactrepositoryrefstatus)|ArtifactRepositoryRef is used to cache the repository to use so we do not need to determine it everytime we reconcile.| |`compressedNodes`|`string`|Compressed and base64 decoded Nodes map| |`conditions`|`Array<`[`Condition`](#condition)`>`|Conditions is a list of conditions the Workflow may have| @@ -846,6 +872,8 @@ CronWorkflowSpec is the specification of a 
CronWorkflow - [`artifact-disable-archive.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-disable-archive.yaml) +- [`artifact-gc-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-gc-workflow.yaml) + - [`artifact-passing-subpath.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing-subpath.yaml) - [`artifact-passing.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing.yaml) @@ -856,10 +884,14 @@ CronWorkflowSpec is the specification of a CronWorkflow - [`artifactory-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifactory-artifact.yaml) +- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) + - [`buildkit-template.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/buildkit-template.yaml) - [`ci-output-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-output-artifact.yaml) +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - [`ci.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci.yaml) - [`cluster-wftmpl-dag.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/cluster-workflow-template/cluster-wftmpl-dag.yaml) @@ -1006,6 +1038,8 @@ CronWorkflowSpec is the specification of a CronWorkflow - [`init-container.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/init-container.yaml) +- [`input-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-azure.yaml) + - [`input-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-gcs.yaml) - 
[`input-artifact-git.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-git.yaml) @@ -1052,6 +1086,8 @@ CronWorkflowSpec is the specification of a CronWorkflow - [`node-selector.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/node-selector.yaml) +- [`output-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-azure.yaml) + - [`output-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-gcs.yaml) - [`output-artifact-s3.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-s3.yaml) @@ -1154,6 +1190,8 @@ CronWorkflowSpec is the specification of a CronWorkflow - [`volumes-pvc.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/volumes-pvc.yaml) +- [`webhdfs-input-output-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml) + - [`work-avoidance.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/work-avoidance.yaml) - [`event-consumer-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/workflow-event-binding/event-consumer-workflowtemplate.yaml) @@ -1227,6 +1265,8 @@ Arguments to a template - [`ci-output-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-output-artifact.yaml) +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - [`ci.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci.yaml) - [`cluster-wftmpl-dag.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/cluster-workflow-template/cluster-wftmpl-dag.yaml) @@ -1390,6 +1430,24 @@ Arguments to a template |`artifacts`|`Array<`[`Artifact`](#artifact)`>`|Artifacts is 
the list of artifacts to pass to the template or workflow| |`parameters`|`Array<`[`Parameter`](#parameter)`>`|Parameters is the list of parameters to pass to the template or workflow| +## ArtifactGC + +ArtifactGC describes how to delete artifacts from completed Workflows + +
+Examples with this field (click to open) +
+ +- [`artifact-gc-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-gc-workflow.yaml) +
+ +### Fields +| Field Name | Field Type | Description | +|:----------:|:----------:|---------------| +|`podMetadata`|[`Metadata`](#metadata)|PodMetadata is an optional field for specifying the Labels and Annotations that should be assigned to the Pod doing the deletion| +|`serviceAccountName`|`string`|ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion| +|`strategy`|`string`|Strategy is the strategy to use.| + ## ArtifactRepositoryRef _No description available_ @@ -1547,7 +1605,7 @@ RetryStrategy provides controls on how to retry a workflow step |`affinity`|[`RetryAffinity`](#retryaffinity)|Affinity prevents running workflow's step on the same host| |`backoff`|[`Backoff`](#backoff)|Backoff is a backoff strategy| |`expression`|`string`|Expression is a condition expression for when a node will be retried. If it evaluates to false, the node will not be retried and the retry strategy will be ignored| -|`limit`|[`IntOrString`](#intorstring)|Limit is the maximum number of attempts when retrying a container| +|`limit`|[`IntOrString`](#intorstring)|Limit is the maximum number of retry attempts when retrying a container. It does not include the original container; the maximum number of total attempts will be `limit + 1`.| |`retryPolicy`|`string`|RetryPolicy is a policy of NodePhase statuses that will be retried| ## Synchronization @@ -1697,6 +1755,17 @@ WorkflowTemplateRef is a reference to a WorkflowTemplate resource. |`clusterScope`|`boolean`|ClusterScope indicates the referred template is cluster scoped (i.e. 
a ClusterWorkflowTemplate).| |`name`|`string`|Name is the resource name of the workflow template.| +## ArtGCStatus + +ArtGCStatus maintains state related to ArtifactGC + +### Fields +| Field Name | Field Type | Description | +|:----------:|:----------:|---------------| +|`notSpecified`|`boolean`|if this is true, we already checked to see if we need to do it and we don't| +|`podsRecouped`|`Map< boolean , string >`|have completed Pods been processed? (mapped by Pod name) used to prevent re-processing the Status of a Pod more than once| +|`strategiesProcessed`|`Map< boolean , string >`|have Pods been started to perform this strategy? (enables us not to re-process what we've already done)| + ## ArtifactRepositoryRefStatus _No description available_ @@ -1770,6 +1839,8 @@ Outputs hold parameters, artifacts, and results from a step - [`artifact-disable-archive.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-disable-archive.yaml) +- [`artifact-gc-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-gc-workflow.yaml) + - [`artifact-passing-subpath.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing-subpath.yaml) - [`artifact-passing.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing.yaml) @@ -1780,8 +1851,12 @@ Outputs hold parameters, artifacts, and results from a step - [`artifactory-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifactory-artifact.yaml) +- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) + - [`ci-output-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-output-artifact.yaml) +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - 
[`conditional-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/conditional-artifacts.yaml) - [`conditional-parameters.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/conditional-parameters.yaml) @@ -1822,6 +1897,8 @@ Outputs hold parameters, artifacts, and results from a step - [`nested-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/nested-workflow.yaml) +- [`output-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-azure.yaml) + - [`output-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-gcs.yaml) - [`output-artifact-s3.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-s3.yaml) @@ -1836,6 +1913,8 @@ Outputs hold parameters, artifacts, and results from a step - [`suspend-template-outputs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/suspend-template-outputs.yaml) +- [`webhdfs-input-output-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml) + - [`work-avoidance.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/work-avoidance.yaml) @@ -1878,6 +1957,8 @@ Artifact indicates an artifact to place at a specified path - [`artifact-disable-archive.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-disable-archive.yaml) +- [`artifact-gc-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-gc-workflow.yaml) + - [`artifact-passing-subpath.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing-subpath.yaml) - [`artifact-passing.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing.yaml) @@ -1888,8 +1969,12 @@ Artifact 
indicates an artifact to place at a specified path - [`artifactory-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifactory-artifact.yaml) +- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) + - [`ci-output-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-output-artifact.yaml) +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - [`ci.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci.yaml) - [`conditional-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/conditional-artifacts.yaml) @@ -1912,6 +1997,8 @@ Artifact indicates an artifact to place at a specified path - [`influxdb-ci.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/influxdb-ci.yaml) +- [`input-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-azure.yaml) + - [`input-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-gcs.yaml) - [`input-artifact-git.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-git.yaml) @@ -1930,10 +2017,14 @@ Artifact indicates an artifact to place at a specified path - [`nested-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/nested-workflow.yaml) +- [`output-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-azure.yaml) + - [`output-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-gcs.yaml) - [`output-artifact-s3.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-s3.yaml) +- 
[`webhdfs-input-output-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml) + - [`work-avoidance.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/work-avoidance.yaml) @@ -1942,7 +2033,10 @@ Artifact indicates an artifact to place at a specified path |:----------:|:----------:|---------------| |`archive`|[`ArchiveStrategy`](#archivestrategy)|Archive controls how the artifact will be saved to the artifact repository.| |`archiveLogs`|`boolean`|ArchiveLogs indicates if the container logs should be archived| +|`artifactGC`|[`ArtifactGC`](#artifactgc)|ArtifactGC describes the strategy to use when deleting an artifact from completed or deleted workflows| |`artifactory`|[`ArtifactoryArtifact`](#artifactoryartifact)|Artifactory contains artifactory artifact location details| +|`azure`|[`AzureArtifact`](#azureartifact)|Azure contains Azure Storage artifact location details| +|`deleted`|`boolean`|Has this been deleted?| |`from`|`string`|From allows an artifact to reference an artifact from a previous step| |`fromExpression`|`string`|FromExpression, if defined, is evaluated to specify the value for the artifact| |`gcs`|[`GCSArtifact`](#gcsartifact)|GCS contains GCS artifact location details| @@ -1978,6 +2072,8 @@ Parameter indicate a passed string parameter to a service template with an optio - [`ci-output-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-output-artifact.yaml) +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - [`ci.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci.yaml) - [`cluster-wftmpl-dag.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/cluster-workflow-template/cluster-wftmpl-dag.yaml) @@ -2278,6 +2374,7 @@ ArtifactLocation describes a location for a single or multiple
artifacts. It is |:----------:|:----------:|---------------| |`archiveLogs`|`boolean`|ArchiveLogs indicates if the container logs should be archived| |`artifactory`|[`ArtifactoryArtifact`](#artifactoryartifact)|Artifactory contains artifactory artifact location details| +|`azure`|[`AzureArtifact`](#azureartifact)|Azure contains Azure Storage artifact location details| |`gcs`|[`GCSArtifact`](#gcsartifact)|GCS contains GCS artifact location details| |`git`|[`GitArtifact`](#gitartifact)|Git contains git artifact location details| |`hdfs`|[`HDFSArtifact`](#hdfsartifact)|HDFS contains HDFS artifact location details| @@ -2294,6 +2391,8 @@ _No description available_ Examples with this field (click to open)
+- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) + - [`graph-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/container-set-template/graph-workflow.yaml) - [`outputs-result-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/container-set-template/outputs-result-workflow.yaml) @@ -2322,6 +2421,8 @@ DAGTemplate is a template subtype for directed acyclic graph templates - [`buildkit-template.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/buildkit-template.yaml) +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - [`cluster-wftmpl-dag.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/cluster-workflow-template/cluster-wftmpl-dag.yaml) - [`clustertemplates.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/cluster-workflow-template/clustertemplates.yaml) @@ -2425,6 +2526,8 @@ _No description available_ - [`artifactory-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifactory-artifact.yaml) +- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) + - [`daemon-nginx.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/daemon-nginx.yaml) - [`daemon-step.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/daemon-step.yaml) @@ -2448,14 +2551,17 @@ _No description available_ - [`sidecar-nginx.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/sidecar-nginx.yaml) - [`sidecar.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/sidecar.yaml) + +- 
[`webhdfs-input-output-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml) ### Fields | Field Name | Field Type | Description | |:----------:|:----------:|---------------| |`body`|`string`|Body is content of the HTTP Request| +|`bodyFrom`|[`HTTPBodySource`](#httpbodysource)|BodyFrom is content of the HTTP Request as Bytes| |`headers`|`Array<`[`HTTPHeader`](#httpheader)`>`|Headers are an optional list of headers to send with HTTP requests| -|`insecureSkipVerify`|`boolean`|insecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client| +|`insecureSkipVerify`|`boolean`|InsecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client| |`method`|`string`|Method is HTTP methods for HTTP Request| |`successCondition`|`string`|SuccessCondition is an expression if evaluated to true is considered successful| |`timeoutSeconds`|`integer`|TimeoutSeconds is request timeout for HTTP Request. Default is 30 seconds| @@ -2475,11 +2581,11 @@ UserContainer is a container specified by a user. ### Fields | Field Name | Field Type | Description | |:----------:|:----------:|---------------| -|`args`|`Array< string >`|Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell| -|`command`|`Array< string >`|Entrypoint array. Not executed within a shell. 
The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell| +|`args`|`Array< string >`|Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell| +|`command`|`Array< string >`|Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell| |`env`|`Array<`[`EnvVar`](#envvar)`>`|List of environment variables to set in the container. Cannot be updated.| |`envFrom`|`Array<`[`EnvFromSource`](#envfromsource)`>`|List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.| -|`image`|`string`|Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.| +|`image`|`string`|Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.| |`imagePullPolicy`|`string`|Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images| |`lifecycle`|[`Lifecycle`](#lifecycle)|Actions that the management system should take in response to container lifecycle events. Cannot be updated.| |`livenessProbe`|[`Probe`](#probe)|Periodic probe of container liveness. Container will be restarted if the probe fails. Cannot be updated. 
More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes| @@ -2523,10 +2629,14 @@ Inputs are the mechanism for passing parameters, artifacts, volumes from one tem - [`artifactory-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifactory-artifact.yaml) +- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) + - [`buildkit-template.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/buildkit-template.yaml) - [`ci-output-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-output-artifact.yaml) +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - [`ci.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci.yaml) - [`clustertemplates.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/cluster-workflow-template/clustertemplates.yaml) @@ -2591,6 +2701,8 @@ Inputs are the mechanism for passing parameters, artifacts, volumes from one tem - [`influxdb-ci.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/influxdb-ci.yaml) +- [`input-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-azure.yaml) + - [`input-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-gcs.yaml) - [`input-artifact-git.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-git.yaml) @@ -2661,6 +2773,8 @@ Inputs are the mechanism for passing parameters, artifacts, volumes from one tem - [`suspend-template-outputs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/suspend-template-outputs.yaml) +- 
[`webhdfs-input-output-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml) + - [`work-avoidance.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/work-avoidance.yaml) - [`event-consumer-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/workflow-event-binding/event-consumer-workflowtemplate.yaml) @@ -2726,6 +2840,7 @@ ResourceTemplate is a template subtype to manipulate kubernetes resources |`failureCondition`|`string`|FailureCondition is a label selector expression which describes the conditions of the k8s resource in which the step was considered failed| |`flags`|`Array< string >`|Flags is a set of additional options passed to kubectl before submitting a resource I.e. to disable resource validation: flags: [ "--validate=false" # disable resource validation]| |`manifest`|`string`|Manifest contains the kubernetes manifest| +|`manifestFrom`|[`ManifestFrom`](#manifestfrom)|ManifestFrom is the source for a single kubernetes manifest| |`mergeStrategy`|`string`|MergeStrategy is the strategy used to merge a patch. It defaults to "strategic" Must be one of: strategic, merge, json| |`setOwnerReference`|`boolean`|SetOwnerReference sets the reference to the workflow on the OwnerReference of generated resource.| |`successCondition`|`string`|SuccessCondition is a label selector expression which describes the conditions of the k8s resource in which it is acceptable to proceed to the following step| @@ -2798,11 +2913,11 @@ ScriptTemplate is a template subtype to enable scripting through code steps ### Fields | Field Name | Field Type | Description | |:----------:|:----------:|---------------| -|`args`|`Array< string >`|Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. 
If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell| -|`command`|`Array< string >`|Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell| +|`args`|`Array< string >`|Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell| +|`command`|`Array< string >`|Entrypoint array. Not executed within a shell. 
The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell| |`env`|`Array<`[`EnvVar`](#envvar)`>`|List of environment variables to set in the container. Cannot be updated.| |`envFrom`|`Array<`[`EnvFromSource`](#envfromsource)`>`|List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.| -|`image`|`string`|Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.| +|`image`|`string`|Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.| |`imagePullPolicy`|`string`|Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. 
More info: https://kubernetes.io/docs/concepts/containers/images#updating-images| |`lifecycle`|[`Lifecycle`](#lifecycle)|Actions that the management system should take in response to container lifecycle events. Cannot be updated.| |`livenessProbe`|[`Probe`](#probe)|Periodic probe of container liveness. Container will be restarted if the probe fails. Cannot be updated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes| @@ -3056,6 +3171,7 @@ ArtifactRepository represents an artifact repository in which a controller will |:----------:|:----------:|---------------| |`archiveLogs`|`boolean`|ArchiveLogs enables log archiving| |`artifactory`|[`ArtifactoryArtifactRepository`](#artifactoryartifactrepository)|Artifactory stores artifacts to JFrog Artifactory| +|`azure`|[`AzureArtifactRepository`](#azureartifactrepository)|Azure stores artifact in an Azure Storage account| |`gcs`|[`GCSArtifactRepository`](#gcsartifactrepository)|GCS stores artifact in a GCS object store| |`hdfs`|[`HDFSArtifactRepository`](#hdfsartifactrepository)|HDFS stores artifacts in HDFS| |`oss`|[`OSSArtifactRepository`](#ossartifactrepository)|OSS stores artifact in a OSS-compliant object store| @@ -3122,6 +3238,10 @@ ArchiveStrategy describes how to archive files/directory when saving artifacts - [`artifact-passing-subpath.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing-subpath.yaml) +- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) + +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - [`map-reduce.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/map-reduce.yaml) - [`output-artifact-s3.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-s3.yaml) @@ -3152,6 +3272,28 @@ 
ArtifactoryArtifact is the location of an artifactory artifact |`url`|`string`|URL of the artifact| |`usernameSecret`|[`SecretKeySelector`](#secretkeyselector)|UsernameSecret is the secret selector to the repository username| +## AzureArtifact + +AzureArtifact is the location of an Azure Storage artifact + +
+Examples with this field (click to open) +
+ +- [`input-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-azure.yaml) + +- [`output-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-azure.yaml) +
+ +### Fields +| Field Name | Field Type | Description | +|:----------:|:----------:|---------------| +|`accountKeySecret`|[`SecretKeySelector`](#secretkeyselector)|AccountKeySecret is the secret selector to the Azure Blob Storage account access key| +|`blob`|`string`|Blob is the blob name (i.e., path) in the container where the artifact resides| +|`container`|`string`|Container is the container where resources will be stored| +|`endpoint`|`string`|Endpoint is the service url associated with an account. It is most likely "https://<ACCOUNT_NAME>.blob.core.windows.net"| +|`useSDKCreds`|`boolean`|UseSDKCreds tells the driver to figure out credentials based on sdk defaults.| + ## GCSArtifact GCSArtifact is the location of a GCS artifact @@ -3194,6 +3336,7 @@ GitArtifact is the location of an git artifact ### Fields | Field Name | Field Type | Description | |:----------:|:----------:|---------------| +|`branch`|`string`|Branch is the branch to fetch when `SingleBranch` is enabled| |`depth`|`integer`|Depth specifies clones/fetches should be shallow and include the given number of commits from the branch tip| |`disableSubmodules`|`boolean`|DisableSubmodules disables submodules during git clone| |`fetch`|`Array< string >`|Fetch specifies a number of refs that should be fetched before checkout| @@ -3201,6 +3344,7 @@ GitArtifact is the location of an git artifact |`passwordSecret`|[`SecretKeySelector`](#secretkeyselector)|PasswordSecret is the secret selector to the repository password| |`repo`|`string`|Repo is the git repository| |`revision`|`string`|Revision is the git commit, tag, branch to checkout| +|`singleBranch`|`boolean`|SingleBranch enables single branch clone, using the `branch` parameter| |`sshPrivateKeySecret`|[`SecretKeySelector`](#secretkeyselector)|SSHPrivateKeySecret is the secret selector to the repository ssh private key| |`usernameSecret`|[`SecretKeySelector`](#secretkeyselector)|UsernameSecret is the secret selector to the repository username| @@ -3231,7 +3375,7 @@ 
HDFSArtifact is the location of an HDFS artifact ## HTTPArtifact -HTTPArtifact allows an file served on HTTP to be placed as an input artifact in a container +HTTPArtifact allows a file served on HTTP to be placed as an input artifact in a container
Examples with this field (click to open) @@ -3241,6 +3385,8 @@ HTTPArtifact allows an file served on HTTP to be placed as an input artifact in - [`artifactory-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifactory-artifact.yaml) +- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) + - [`daemon-nginx.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/daemon-nginx.yaml) - [`daemon-step.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/daemon-step.yaml) @@ -3264,11 +3410,14 @@ HTTPArtifact allows an file served on HTTP to be placed as an input artifact in - [`sidecar-nginx.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/sidecar-nginx.yaml) - [`sidecar.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/sidecar.yaml) + +- [`webhdfs-input-output-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml)
### Fields | Field Name | Field Type | Description | |:----------:|:----------:|---------------| +|`auth`|[`HTTPAuth`](#httpauth)|Auth contains information for client authentication| |`headers`|`Array<`[`Header`](#header)`>`|Headers are an optional list of headers to send with HTTP requests for artifacts| |`url`|`string`|URL of the artifact| @@ -3484,8 +3633,6 @@ MetricLabel is a single label for a prometheus metric - [`dag-inline-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/dag-inline-workflow.yaml) -- [`data-transformations.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/data-transformations.yaml) - - [`exit-handler-with-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/exit-handler-with-artifacts.yaml) - [`exit-handler-with-param.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/exit-handler-with-param.yaml) @@ -3535,6 +3682,8 @@ _No description available_ Examples with this field (click to open)
+- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) + - [`graph-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/container-set-template/graph-workflow.yaml) - [`outputs-result-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/container-set-template/outputs-result-workflow.yaml) @@ -3553,12 +3702,12 @@ _No description available_ ### Fields | Field Name | Field Type | Description | |:----------:|:----------:|---------------| -|`args`|`Array< string >`|Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell| -|`command`|`Array< string >`|Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell| +|`args`|`Array< string >`|Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell| +|`command`|`Array< string >`|Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell| |`dependencies`|`Array< string >`|_No description available_| |`env`|`Array<`[`EnvVar`](#envvar)`>`|List of environment variables to set in the container. Cannot be updated.| |`envFrom`|`Array<`[`EnvFromSource`](#envfromsource)`>`|List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. 
When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.| -|`image`|`string`|Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.| +|`image`|`string`|Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.| |`imagePullPolicy`|`string`|Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images| |`lifecycle`|[`Lifecycle`](#lifecycle)|Actions that the management system should take in response to container lifecycle events. Cannot be updated.| |`livenessProbe`|[`Probe`](#probe)|Periodic probe of container liveness. Container will be restarted if the probe fails. Cannot be updated. 
More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes| @@ -3624,6 +3773,8 @@ DAGTask represents a node in the graph during DAG execution - [`buildkit-template.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/buildkit-template.yaml) +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - [`cluster-wftmpl-dag.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/cluster-workflow-template/cluster-wftmpl-dag.yaml) - [`clustertemplates.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/cluster-workflow-template/clustertemplates.yaml) @@ -3793,10 +3944,26 @@ _No description available_ |:----------:|:----------:|---------------| |`expression`|`string`|Expression defines an expr expression to apply| +## HTTPBodySource + +HTTPBodySource contains the source of the HTTP body. + +### Fields +| Field Name | Field Type | Description | +|:----------:|:----------:|---------------| +|`bytes`|`byte`|_No description available_| + ## HTTPHeader _No description available_ +
+Examples with this field (click to open) +
+ +- [`webhdfs-input-output-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml) +
+ ### Fields | Field Name | Field Type | Description | |:----------:|:----------:|---------------| @@ -3820,6 +3987,15 @@ Cache is the configuration for the type of cache to be used |:----------:|:----------:|---------------| |`configMap`|[`ConfigMapKeySelector`](#configmapkeyselector)|ConfigMap sets a ConfigMap-based cache| +## ManifestFrom + +_No description available_ + +### Fields +| Field Name | Field Type | Description | +|:----------:|:----------:|---------------| +|`artifact`|[`Artifact`](#artifact)|Artifact contains the artifact to use| + ## ContinueOn ContinueOn defines if a workflow should continue even if a task or step fails/errors. It can be specified if the workflow should continue when the pod errors, fails or both. @@ -3923,6 +4099,28 @@ ArtifactoryArtifactRepository defines the controller configuration for an artifa |`repoURL`|`string`|RepoURL is the url for artifactory repo.| |`usernameSecret`|[`SecretKeySelector`](#secretkeyselector)|UsernameSecret is the secret selector to the repository username| +## AzureArtifactRepository + +AzureArtifactRepository defines the controller configuration for an Azure Blob Storage artifact repository + +
+Examples with this field (click to open) +
+ +- [`input-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-azure.yaml) + +- [`output-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-azure.yaml) +
+ +### Fields +| Field Name | Field Type | Description | +|:----------:|:----------:|---------------| +|`accountKeySecret`|[`SecretKeySelector`](#secretkeyselector)|AccountKeySecret is the secret selector to the Azure Blob Storage account access key| +|`blobNameFormat`|`string`|BlobNameFormat defines the format of how to store blob names. Can reference workflow variables| +|`container`|`string`|Container is the container where resources will be stored| +|`endpoint`|`string`|Endpoint is the service url associated with an account. It is most likely "https://<ACCOUNT_NAME>.blob.core.windows.net"| +|`useSDKCreds`|`boolean`|UseSDKCreds tells the driver to figure out credentials based on sdk defaults.| + ## GCSArtifactRepository GCSArtifactRepository defines the controller configuration for a GCS artifact repository @@ -4043,6 +4241,10 @@ NoneStrategy indicates to skip tar process and upload the files or directory tre - [`artifact-passing-subpath.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing-subpath.yaml) +- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) + +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - [`map-reduce.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/map-reduce.yaml) - [`output-artifact-s3.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-s3.yaml) @@ -4068,10 +4270,35 @@ TarStrategy will tar and gzip the file or directory when saving ZipStrategy will unzip zipped input artifacts +## HTTPAuth + +_No description available_ + +
+Examples with this field (click to open) +
+ +- [`webhdfs-input-output-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml) +
+ +### Fields +| Field Name | Field Type | Description | +|:----------:|:----------:|---------------| +|`basicAuth`|[`BasicAuth`](#basicauth)|_No description available_| +|`clientCert`|[`ClientCertAuth`](#clientcertauth)|_No description available_| +|`oauth2`|[`OAuth2Auth`](#oauth2auth)|_No description available_| + ## Header Header indicate a key-value request header to be used when fetching artifacts over HTTP +
+Examples with this field (click to open) +
+ +- [`webhdfs-input-output-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml) +
+ ### Fields | Field Name | Field Type | Description | |:----------:|:----------:|---------------| @@ -4147,7 +4374,10 @@ ArtifactPaths expands a step from a collection of artifacts |:----------:|:----------:|---------------| |`archive`|[`ArchiveStrategy`](#archivestrategy)|Archive controls how the artifact will be saved to the artifact repository.| |`archiveLogs`|`boolean`|ArchiveLogs indicates if the container logs should be archived| +|`artifactGC`|[`ArtifactGC`](#artifactgc)|ArtifactGC describes the strategy to use when to deleting an artifact from completed or deleted workflows| |`artifactory`|[`ArtifactoryArtifact`](#artifactoryartifact)|Artifactory contains artifactory artifact location details| +|`azure`|[`AzureArtifact`](#azureartifact)|Azure contains Azure Storage artifact location details| +|`deleted`|`boolean`|Has this been deleted?| |`from`|`string`|From allows an artifact to reference an artifact from a previous step| |`fromExpression`|`string`|FromExpression, if defined, is evaluated to specify the value for the artifact| |`gcs`|[`GCSArtifact`](#gcsartifact)|GCS contains GCS artifact location details| @@ -4221,6 +4451,63 @@ _No description available_ |:----------:|:----------:|---------------| |`secretKeyRef`|[`SecretKeySelector`](#secretkeyselector)|_No description available_| +## BasicAuth + +BasicAuth describes the secret selectors required for basic authentication + +### Fields +| Field Name | Field Type | Description | +|:----------:|:----------:|---------------| +|`passwordSecret`|[`SecretKeySelector`](#secretkeyselector)|PasswordSecret is the secret selector to the repository password| +|`usernameSecret`|[`SecretKeySelector`](#secretkeyselector)|UsernameSecret is the secret selector to the repository username| + +## ClientCertAuth + +ClientCertAuth holds necessary information for client authentication via certificates + +
+Examples with this field (click to open) +
+ +- [`webhdfs-input-output-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml) + 
+ +### Fields +| Field Name | Field Type | Description | +|:----------:|:----------:|---------------| +|`clientCertSecret`|[`SecretKeySelector`](#secretkeyselector)|_No description available_| +|`clientKeySecret`|[`SecretKeySelector`](#secretkeyselector)|_No description available_| + +## OAuth2Auth + +OAuth2Auth holds all information for client authentication via OAuth2 tokens + +### Fields +| Field Name | Field Type | Description | +|:----------:|:----------:|---------------| +|`clientIDSecret`|[`SecretKeySelector`](#secretkeyselector)|_No description available_| +|`clientSecretSecret`|[`SecretKeySelector`](#secretkeyselector)|_No description available_| +|`endpointParams`|`Array<`[`OAuth2EndpointParam`](#oauth2endpointparam)`>`|_No description available_| +|`scopes`|`Array< string >`|_No description available_| +|`tokenURLSecret`|[`SecretKeySelector`](#secretkeyselector)|_No description available_| + +## OAuth2EndpointParam + +EndpointParam is for requesting optional fields that should be sent in the oauth request + +
+Examples with this field (click to open) +
+ +- [`webhdfs-input-output-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml) + 
+ +### Fields +| Field Name | Field Type | Description | +|:----------:|:----------:|---------------| +|`key`|`string`|Name is the header name| +|`value`|`string`|Value is the literal value to use for the header| + # External Fields @@ -4242,6 +4529,8 @@ ObjectMeta is metadata that all persisted resources must have, which includes al - [`artifact-disable-archive.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-disable-archive.yaml) +- [`artifact-gc-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-gc-workflow.yaml) + - [`artifact-passing-subpath.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing-subpath.yaml) - [`artifact-passing.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing.yaml) @@ -4252,10 +4541,14 @@ ObjectMeta is metadata that all persisted resources must have, which includes al - [`artifactory-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifactory-artifact.yaml) +- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) + - [`buildkit-template.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/buildkit-template.yaml) - [`ci-output-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-output-artifact.yaml) +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - [`ci.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci.yaml) - [`cluster-wftmpl-dag.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/cluster-workflow-template/cluster-wftmpl-dag.yaml) @@ -4402,6 +4695,8 @@ ObjectMeta is metadata that all persisted resources must have, which includes al - 
[`init-container.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/init-container.yaml) +- [`input-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-azure.yaml) + - [`input-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-gcs.yaml) - [`input-artifact-git.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-git.yaml) @@ -4448,6 +4743,8 @@ ObjectMeta is metadata that all persisted resources must have, which includes al - [`node-selector.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/node-selector.yaml) +- [`output-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-azure.yaml) + - [`output-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-gcs.yaml) - [`output-artifact-s3.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-s3.yaml) @@ -4550,6 +4847,8 @@ ObjectMeta is metadata that all persisted resources must have, which includes al - [`volumes-pvc.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/volumes-pvc.yaml) +- [`webhdfs-input-output-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml) + - [`work-avoidance.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/work-avoidance.yaml) - [`event-consumer-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/workflow-event-binding/event-consumer-workflowtemplate.yaml) @@ -4716,6 +5015,8 @@ PersistentVolumeClaim is a user's request for and claim to a persistent volume - [`ci-output-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-output-artifact.yaml) +- 
[`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - [`ci.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci.yaml) - [`fun-with-gifs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/fun-with-gifs.yaml) @@ -4742,6 +5043,8 @@ Volume represents a named volume in a pod that may be accessed by any container Examples with this field (click to open)
+- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) + - [`buildkit-template.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/buildkit-template.yaml) - [`workspace-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/container-set-template/workspace-workflow.yaml) @@ -4872,6 +5175,8 @@ A single application container that you want to run within a pod. - [`artifact-disable-archive.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-disable-archive.yaml) +- [`artifact-gc-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-gc-workflow.yaml) + - [`artifact-passing-subpath.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing-subpath.yaml) - [`artifact-passing.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing.yaml) @@ -4886,6 +5191,8 @@ A single application container that you want to run within a pod. - [`ci-output-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-output-artifact.yaml) +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - [`ci.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci.yaml) - [`clustertemplates.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/cluster-workflow-template/clustertemplates.yaml) @@ -4992,6 +5299,8 @@ A single application container that you want to run within a pod. 
- [`init-container.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/init-container.yaml) +- [`input-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-azure.yaml) + - [`input-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-gcs.yaml) - [`input-artifact-git.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-git.yaml) @@ -5032,6 +5341,8 @@ A single application container that you want to run within a pod. - [`node-selector.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/node-selector.yaml) +- [`output-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-azure.yaml) + - [`output-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-gcs.yaml) - [`output-artifact-s3.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-s3.yaml) @@ -5124,6 +5435,8 @@ A single application container that you want to run within a pod. - [`volumes-pvc.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/volumes-pvc.yaml) +- [`webhdfs-input-output-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml) + - [`work-avoidance.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/work-avoidance.yaml) - [`event-consumer-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/workflow-event-binding/event-consumer-workflowtemplate.yaml) @@ -5189,10 +5502,14 @@ VolumeMount describes a mounting of a Volume within a container. Examples with this field (click to open)
+- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) + - [`buildkit-template.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/buildkit-template.yaml) - [`ci-output-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-output-artifact.yaml) +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - [`ci.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci.yaml) - [`workspace-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/container-set-template/workspace-workflow.yaml) @@ -5314,6 +5631,8 @@ ResourceRequirements describes the compute resource requirements. - [`ci-output-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-output-artifact.yaml) +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - [`ci.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci.yaml) - [`dns-config.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/dns-config.yaml) @@ -5531,6 +5850,8 @@ PersistentVolumeClaimSpec describes the common attributes of storage devices and - [`artifact-disable-archive.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-disable-archive.yaml) +- [`artifact-gc-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-gc-workflow.yaml) + - [`artifact-passing-subpath.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing-subpath.yaml) - [`artifact-passing.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifact-passing.yaml) @@ -5541,10 +5862,14 @@ 
PersistentVolumeClaimSpec describes the common attributes of storage devices and - [`artifactory-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifactory-artifact.yaml) +- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) + - [`buildkit-template.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/buildkit-template.yaml) - [`ci-output-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-output-artifact.yaml) +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci-workflowtemplate.yaml) + - [`ci.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/ci.yaml) - [`cluster-wftmpl-dag.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/cluster-workflow-template/cluster-wftmpl-dag.yaml) @@ -5691,6 +6016,8 @@ PersistentVolumeClaimSpec describes the common attributes of storage devices and - [`init-container.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/init-container.yaml) +- [`input-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-azure.yaml) + - [`input-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-gcs.yaml) - [`input-artifact-git.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/input-artifact-git.yaml) @@ -5737,6 +6064,8 @@ PersistentVolumeClaimSpec describes the common attributes of storage devices and - [`node-selector.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/node-selector.yaml) +- [`output-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-azure.yaml) + - 
[`output-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-gcs.yaml) - [`output-artifact-s3.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/output-artifact-s3.yaml) @@ -5839,6 +6168,8 @@ PersistentVolumeClaimSpec describes the common attributes of storage devices and - [`volumes-pvc.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/volumes-pvc.yaml) +- [`webhdfs-input-output-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml) + - [`work-avoidance.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/work-avoidance.yaml) - [`event-consumer-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/workflow-event-binding/event-consumer-workflowtemplate.yaml) @@ -6001,6 +6332,8 @@ Represents an empty directory for a pod. Empty directory volumes support ownersh Examples with this field (click to open)
+- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/artifacts-workflowtemplate.yaml) + - [`workspace-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/container-set-template/workspace-workflow.yaml) - [`init-container.yaml`](https://github.com/argoproj/argo-workflows/blob/master/examples/init-container.yaml) diff --git a/docs/high-availability.md b/docs/high-availability.md index 90aa29f77e6c..fb2a245f2cf4 100644 --- a/docs/high-availability.md +++ b/docs/high-availability.md @@ -2,15 +2,15 @@ ## Workflow Controller -Only one controller can run at once. If it crashes, Kubernetes will start another pod. +Before v3.0, only one controller could run at once. (If it crashed, Kubernetes would start another pod.) -> v3.0 +> v3.0 -For many users, a short loss of workflow service maybe acceptable - the new controller will just continue running +For many users, a short loss of workflow service may be acceptable - the new controller will just continue running workflows if it restarts. However, with high service guarantees, new pods may take too long to start running workflows. You should run two replicas, and one of which will be kept on hot-standby. -A voluntary pod disruption can cause both replicas to be replaced at the same time. You should use a Pod Disruption +A voluntary pod disruption can cause both replicas to be replaced at the same time. You should use a Pod Disruption Budget to prevent this and Pod Priority to recover faster from an involuntary pod disruption: * [Pod Disruption Budget](https://kubernetes.io/docs/concepts/workloads/pods/disruptions/#pod-disruption-budgets) @@ -24,4 +24,3 @@ Run a minimum of two replicas, typically three, should be run, otherwise it may !!! Tip Consider using [multi AZ-deployment using pod anti-affinity](https://www.verygoodsecurity.com/blog/posts/kubernetes-multi-az-deployments-using-pod-anti-affinity). 
- diff --git a/docs/http-template.md b/docs/http-template.md index aa67bfcc5997..771f141d6777 100644 --- a/docs/http-template.md +++ b/docs/http-template.md @@ -1,10 +1,8 @@ # HTTP Template - -> v3.2 and after -`HTTP Template` is a type of template which can execute the HTTP Requests. +> v3.2 and after -### HTTP Template +`HTTP Template` is a type of template which can execute HTTP Requests. ```yaml apiVersion: argoproj.io/v1alpha1 @@ -33,8 +31,8 @@ spec: value: "test-value" # Template will succeed if evaluated to true, otherwise will fail # Available variables: - # request.body: string, the response body - # request.headers: map[string][]string, the response headers + # request.body: string, the request body + # request.headers: map[string][]string, the request headers # response.url: string, the request url # response.method: string, the request method # response.statusCode: int, the response status code @@ -44,7 +42,10 @@ spec: body: "test body" # Change request body ``` -### Argo Agent +## Argo Agent + HTTP Templates use the Argo Agent, which executes the requests independently of the controller. The Agent and the Workflow Controller communicate through the `WorkflowTaskSet` CRD, which is created for each running `Workflow` that requires the use -of the `Agent`. \ No newline at end of file +of the `Agent`. + +In order to use the Argo Agent, you will need to ensure that you have added the appropriate [workflow RBAC](workflow-rbac.md) to add an agent role with to Argo Workflows. An example agent role can be found in [the quick-start manifests](https://github.com/argoproj/argo-workflows/tree/master/manifests/quick-start/base/agent-role.yaml). 
diff --git a/docs/ide-setup.md b/docs/ide-setup.md index b6dffe9090d3..2e5f4f4472b1 100644 --- a/docs/ide-setup.md +++ b/docs/ide-setup.md @@ -12,11 +12,11 @@ Configure your IDE to reference the Argo schema and map it to your Argo YAML fil ![JetBrains IDEs Configure Schema](assets/jetbrains-ide-step-1-config.png) -- The schema is located at [https://raw.githubusercontent.com/argoproj/argo-workflows/master/api/jsonschema/schema.json](https://raw.githubusercontent.com/argoproj/argo-workflows/master/api/jsonschema/schema.json). +- The schema is located [here](https://raw.githubusercontent.com/argoproj/argo-workflows/master/api/jsonschema/schema.json). - Specify a file glob pattern that locates **your** Argo files. The example glob here is for the Argo Github project! - Note that you may need to restart IDEA to pick up the changes. -That's it. Open an Argo YAML file and you should see smarter behaviour, including type errors and context-sensitive autocomplete. +That's it. Open an Argo YAML file and you should see smarter behavior, including type errors and context-sensitive auto-complete. ![JetBrains IDEs Example Functionality](assets/jetbrains-ide-step-1-example-functionality.png) @@ -24,7 +24,7 @@ That's it. Open an Argo YAML file and you should see smarter behaviour, includin If you have the [JetBrains Kubernetes Plugin](https://plugins.jetbrains.com/plugin/10485-kubernetes) installed in your IDE, the validation can be configured in the Kubernetes plugin settings -instead of using the internal JSON schema file validator. +instead of using the internal JSON schema file validator. ![JetBrains IDEs Configure Schema with Kubernetes Plugin](assets/jetbrains-ide-step-1-kubernetes-config.png) @@ -32,18 +32,18 @@ Unlike the previous JSON schema validation method, the plugin detects the necess based on Kubernetes resource definition keys and does not require a file glob pattern. 
Like the previously described method: -- The schema is located at [https://raw.githubusercontent.com/argoproj/argo-workflows/master/api/jsonschema/schema.json](https://raw.githubusercontent.com/argoproj/argo-workflows/master/api/jsonschema/schema.json). +- The schema is located [here](https://raw.githubusercontent.com/argoproj/argo-workflows/master/api/jsonschema/schema.json). - Note that you may need to restart IDEA to pick up the changes. ### VSCode -The [Red Hat YAML](https://github.com/redhat-developer/vscode-yaml) plugin will provide error highlighting and autocompletion for Argo resources. +The [Red Hat YAML](https://github.com/redhat-developer/vscode-yaml) plugin will provide error highlighting and auto-completion for Argo resources. Install the Red Hat YAML plugin in VSCode and open extension settings: ![VSCode Install Plugin](assets/vscode-ide-step-1-install-plugin.png) -Open the YAML schemas settings: +Open the YAML schema settings: ![VSCode YAML Schema Settings](assets/vscode-ide-step-2-schema-settings.png) @@ -51,10 +51,10 @@ Add the Argo schema setting `yaml.schemas`: ![VSCode Specify Argo Schema](assets/vscode-ide-step-3-spec-schema.png) -- The schema is located at [https://raw.githubusercontent.com/argoproj/argo-workflows/master/api/jsonschema/schema.json](https://raw.githubusercontent.com/argoproj/argo-workflows/master/api/jsonschema/schema.json). +- The schema is located [here](https://raw.githubusercontent.com/argoproj/argo-workflows/master/api/jsonschema/schema.json). - Specify a file glob pattern that locates **your** Argo files. The example glob here is for the Argo Github project! -- Note that other defined schemas with overlapping glob patterns may cause errors. +- Note that other defined schema with overlapping glob patterns may cause errors. -That's it. Open an Argo YAML file and you should see smarter behaviour, including type errors and context-sensitive autocomplete. +That's it. 
Open an Argo YAML file and you should see smarter behavior, including type errors and context-sensitive auto-complete. ![VScode Example Functionality](assets/vscode-ide-step-4-example-functionality.png) diff --git a/docs/templates.md b/docs/inline-templates.md similarity index 54% rename from docs/templates.md rename to docs/inline-templates.md index c06e075df5ff..03210fc3f802 100644 --- a/docs/templates.md +++ b/docs/inline-templates.md @@ -1,12 +1,4 @@ -# Templates - -See [core concepts](core-concepts.md) for DAG, steps, container templates. - -## Container Set Template - -See [container set template](container-set-template.md). - -## Inline Templates +# Inline Templates > v3.2 and after @@ -18,4 +10,4 @@ Examples: * [Steps](https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/steps-inline-workflow.yaml) !!! Warning - You can only inline once. Inlining a DAG within a DAG will not work. + You can only inline once. Inline a DAG within a DAG will not work. diff --git a/docs/installation.md b/docs/installation.md index 99d64581c542..3e591d145082 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -1,30 +1,40 @@ # Installation -## Argo on Desktop +## Non-production installation -Use the [quick-start manifests](quick-start.md). +If you just want to try out Argo Workflows in a non-production environment (including on desktop via minikube/kind/k3d etc) follow the [quick-start guide](quick-start.md). -## Argo in Production +## Production installation -Determine your base installation option. +### Installation Methods -* A **cluster install** will watch and execute workflows in all namespaces. -* A **namespace install** only executes workflows in the namespace it is installed in (typically `argo`). -* A **managed namespace install**: only executes workflows in a specific namespace ([learn more](managed-namespace.md)). +#### Official release manifests -⚠️ `latest` is tip, not stable. Never run it. 
Make sure you're using the manifests attached to each Github release. See [this link](https://github.com/argoproj/argo-workflows/releases/latest) for the most recent manifests. +To install Argo Workflows, navigate to the [releases page](https://github.com/argoproj/argo-workflows/releases/latest) and find the release you wish to use (the latest full release is preferred). Scroll down to the `Controller and Server` section and execute the `kubectl` commands. -⚠️ Double-check you have the right version of your executor configured, it's easy to miss. +You can use Kustomize to patch your preferred [configurations](managed-namespace.md) on top of the base manifest. -⚠️ If you are using GitOps. Never use Kustomize remote base, this is dangerous. Instead, copy the manifests into your Git repo. +⚠️ If you are using GitOps, never use Kustomize remote base: this is dangerous. Instead, copy the manifests into your Git repo. -Review the following: +⚠️ `latest` is tip, not stable. Never run it in production. + +#### Argo Workflows Helm Chart + +You can install Argo Workflows using the community maintained [Helm charts](https://github.com/argoproj/argo-helm). - * [Security](security.md). - * [Scaling](scaling.md) and [running at massive scale](running-at-massive-scale.md). - * [High-availability](high-availability.md) - * [Disaster recovery](disaster-recovery.md) +## Installation options -Read the [upgrading guide](upgrading.md) before any major upgrade to be aware of breaking changes. +Determine your base installation option. + +* A **cluster install** will watch and execute workflows in all namespaces. This is the default installation option when installing using the official release manifests. +* A **namespace install** only executes workflows in the namespace it is installed in (typically `argo`). Look for `namespace-install.yaml` in the [release assets](https://github.com/argoproj/argo-workflows/releases/latest). 
+* A **managed namespace install**: only executes workflows in a specific namespace ([learn more](managed-namespace.md)). +## Additional installation considerations + +Review the following: +* [Security](security.md). +* [Scaling](scaling.md) and [running at massive scale](running-at-massive-scale.md). +* [High-availability](high-availability.md) +* [Disaster recovery](disaster-recovery.md) diff --git a/docs/intermediate-inputs.md b/docs/intermediate-inputs.md new file mode 100644 index 000000000000..c3bf889d1c87 --- /dev/null +++ b/docs/intermediate-inputs.md @@ -0,0 +1,138 @@ +# Intermediate Parameters + +> v3.4 and after + +Traditionally, Argo workflows has supported input parameters from UI only when the workflow starts, +and after that, it's pretty much on autopilot. But, there are a lot of use cases where human interaction is required. + +This interaction is in the form of providing input text in the middle of the workflow, choosing from a dropdown of the options which a workflow step itself is intelligently generating. + +A similar feature which you can see in jenkins is [pipeline-input-step](https://www.jenkins.io/doc/pipeline/steps/pipeline-input-step/) + +Example use cases include: + +- A human approval before doing something in production environment. +- Programmatic generation of a list of inputs from which the user chooses. +Choosing from a list of available databases which the workflow itself is generating. + +This feature is achieved via `suspend template`. + +The workflow will pause at a `Suspend` node, and user will be able to update parameters using fields type text or dropdown. + +## Intermediate Parameters Approval Example + +- The below example shows static enum values `approval` step. +- The user will be able to choose between `[YES, NO]` which will be used in subsequent steps. 
+ +[![Approval Example Demo](https://img.youtube.com/vi/eyeZ2oddwWE/0.jpg)](https://youtu.be/eyeZ2oddwWE) + +```yaml + +entrypoint: cicd-pipeline +templates: + - name: cicd-pipeline + steps: + - - name: deploy-pre-prod + template: deploy + - - name: approval + template: approval + - - name: deploy-prod + template: deploy + when: '{{steps.approval.outputs.parameters.approve}} == YES' + - name: approval + suspend: {} + inputs: + parameters: + - name: approve + default: 'NO' + enum: + - 'YES' + - 'NO' + outputs: + parameters: + - name: approve + valueFrom: + supplied: {} + - name: deploy + container: + image: 'argoproj/argosay:v2' + command: + - /argosay + args: + - echo + - deploying +``` + +## Intermediate Parameters DB Schema Update Example + +- The below example shows programmatic generation of `enum` values. +- The `generate-db-list` template generates an output called `db_list`. +- This output is of type `json`. +- Since this `json` has a `key` called `enum`, with an array of options, the UI will parse this and display it as a dropdown. +- The output can be any string also, in which case the UI will display it as a text field. Which the user can later edit. 
+ +[![DB Schema Update Example Demo](https://img.youtube.com/vi/QgE-1782YJc/0.jpg)](https://youtu.be/QgE-1782YJc) + +```yaml +entrypoint: db-schema-update +templates: + - name: db-schema-update + steps: + - - name: generate-db-list + template: generate-db-list + - - name: choose-db + template: choose-db + arguments: + parameters: + - name: db_name + value: '{{steps.generate-db-list.outputs.parameters.db_list}}' + - - name: update-schema + template: update-schema + arguments: + parameters: + - name: db_name + value: '{{steps.choose-db.outputs.parameters.db_name}}' + - name: generate-db-list + outputs: + parameters: + - name: db_list + valueFrom: + path: /tmp/db_list.txt + container: + name: main + image: 'argoproj/argosay:v2' + command: + - sh + - '-c' + args: + - >- + echo "{\"enum\": [\"db1\", \"db2\", \"db3\"]}" | tee /tmp/db_list.txt + - name: choose-db + inputs: + parameters: + - name: db_name + outputs: + parameters: + - name: db_name + valueFrom: + supplied: {} + suspend: {} + - name: update-schema + inputs: + parameters: + - name: db_name + container: + name: main + image: 'argoproj/argosay:v2' + command: + - sh + - '-c' + args: + - echo Updating DB {{inputs.parameters.db_name}} +``` + +### Some Important Details + +- The suspended node should have the **SAME** parameters defined in `inputs.parameters` and `outputs.parameters`. +- All the output parameters in the suspended node should have `valueFrom.supplied: {}` +- The selected values will be available at `.outputs.parameters.` diff --git a/docs/key-only-artifacts.md b/docs/key-only-artifacts.md index 7a3519075eb3..2ace452d120f 100644 --- a/docs/key-only-artifacts.md +++ b/docs/key-only-artifacts.md @@ -1,18 +1,17 @@ # Key-Only Artifacts - > v3.0 and after A key-only artifact is an input or output artifact where you only specific the key, omitting the bucket, secrets etc. When these are omitted, the bucket/secrets from the configured artifact repository is used. 
This allows you to move the configuration of the artifact repository out of the workflow specification. -This is closely related to [artifact repository ref](artifact-repository-ref.md). You'll want to use them together for maximum benefit. +This is closely related to [artifact repository ref](artifact-repository-ref.md). You'll want to use them together for maximum benefit. This should probably be your default if you're using v3.0: * Reduces the size of workflows (improved performance). -* User owned artifact repository set-up configuration (simplified management). +* User owned artifact repository set-up configuration (simplified management). * Decouples the artifact location configuration from the workflow. Allowing you to re-configure the artifact repository without changing your workflows or templates. Example: @@ -57,4 +56,4 @@ spec: ``` !!! WARNING - The location data is not longer stored in `/status/nodes`. Any tooling that relies on this will need to be updated. \ No newline at end of file + The location data is not longer stored in `/status/nodes`. Any tooling that relies on this will need to be updated. diff --git a/docs/kubectl.md b/docs/kubectl.md index 38521a486503..d9f54b15b90b 100644 --- a/docs/kubectl.md +++ b/docs/kubectl.md @@ -1,14 +1,13 @@ -# Kubectl +# `kubectl` You can also create Workflows directly with `kubectl`. However, the Argo CLI offers extra features that `kubectl` does not, such as YAML validation, workflow visualization, parameter passing, retries and resubmits, suspend and resume, and more. 
-```sh +```bash kubectl create -n argo -f https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/hello-world.yaml kubectl get wf -n argo kubectl get wf hello-world-xxx -n argo kubectl get po -n argo --selector=workflows.argoproj.io/workflow=hello-world-xxx kubectl logs hello-world-yyy -c main -n argo ``` - diff --git a/docs/lifecyclehook.md b/docs/lifecyclehook.md index e555e568b3b4..2da5f0f93111 100644 --- a/docs/lifecyclehook.md +++ b/docs/lifecyclehook.md @@ -1,4 +1,4 @@ -# LifecycleHook +# Lifecycle-Hook > v3.3 and after @@ -9,16 +9,18 @@ A [`LifecycleHook`](https://argoproj.github.io/argo-workflows/fields/#lifecycleh In other words, a `LifecycleHook` functions like an [exit handler](https://github.com/argoproj/argo-workflows/blob/master/examples/exit-handlers.yaml) with a conditional expression. **Workflow-level `LifecycleHook`**: Executes the workflow when a configured expression is met. -- [Workflow-level LifecycleHook example](https://github.com/argoproj/argo-workflows/blob/45730a9cdeb588d0e52b1ac87b6e0ca391a95a81/examples/life-cycle-hooks-wf-level.yaml) -**Template-level LifecycleHook**: Executes the template when a configured expression is met. -- [Template-level LifecycleHook example](https://github.com/argoproj/argo-workflows/blob/45730a9cdeb588d0e52b1ac87b6e0ca391a95a81/examples/life-cycle-hooks-tmpl-level.yaml) +- [Workflow-level Lifecycle-Hook example](https://github.com/argoproj/argo-workflows/blob/45730a9cdeb588d0e52b1ac87b6e0ca391a95a81/examples/life-cycle-hooks-wf-level.yaml) + +**Template-level Lifecycle-Hook**: Executes the template when a configured expression is met. 
+ + + +- [Template-level Lifecycle-Hook example](https://github.com/argoproj/argo-workflows/blob/45730a9cdeb588d0e52b1ac87b6e0ca391a95a81/examples/life-cycle-hooks-tmpl-level.yaml) ## Supported conditions - [Exit handler variables](https://github.com/argoproj/argo-workflows/blob/ebd3677c7a9c973b22fa81ef3b409404a38ec331/docs/variables.md#exit-handler): `workflow.status` and `workflow.failures` - [`template`](https://argoproj.github.io/argo-workflows/fields/#template) -- [`templateRef`](https://argoproj.github.io/argo-workflows/fields/#templateref) +- [`templateRef`](https://argoproj.github.io/argo-workflows/fields/#templateref) - [`arguments`](https://github.com/argoproj/argo-workflows/blob/master/examples/conditionals.yaml) ## Unsupported conditions diff --git a/docs/links.md b/docs/links.md index dfec1e82ecfa..08b0574ce6fa 100644 --- a/docs/links.md +++ b/docs/links.md @@ -1,33 +1,31 @@ # Links -![GA](assets/ga.svg) - > v2.7 and after You can configure Argo Server to show custom links: -* A "Get Help" button in the bottom right of the window linking to you organisation help pages or chat room. -* Deep-links to your facilities (e.g. logging facility) in the user interface for both the workflow and each workflow pod. +* A "Get Help" button in the bottom right of the window linking to your organization help pages or chat room. +* Deep-links to your facilities (e.g. logging facility) in the UI for both the workflow and each workflow pod. Links can contain placeholder variables. Placeholder variables are indicated by the dollar sign and curly braces: `${variable}`. 
These are the commonly used variables: -- `${metadata.namespace}`: Kubernetes namespace of the current workflow / pod / event source / sensor -- `${metadata.name}`: Name of the current workflow / pod / event source / sensor -- `${status.startedAt}`: Start timestamp of the workflow / pod, in the format of `2021-01-01T10:35:56Z` -- `${status.finishedAt}`: End timestamp of the workflow / pod, in the format of `2021-01-01T10:35:56Z`. If the workflow/pod is still running, this variable will be `null` +* `${metadata.namespace}`: Kubernetes namespace of the current workflow / pod / event source / sensor +* `${metadata.name}`: Name of the current workflow / pod / event source / sensor +* `${status.startedAt}`: Start time-stamp of the workflow / pod, in the format of `2021-01-01T10:35:56Z` +* `${status.finishedAt}`: End time-stamp of the workflow / pod, in the format of `2021-01-01T10:35:56Z`. If the workflow/pod is still running, this variable will be `null` See [workflow-controller-configmap.yaml](workflow-controller-configmap.yaml) for a complete example > v3.1 and after -Epoch timestamps are available now. These are useful if we want to add links to logging facilities like [Grafana](https://grafana.com/) -or [DataDog](https://datadoghq.com/), as they support Unix epoch timestamp formats as URL +Epoch time-stamps are available now. These are useful if we want to add links to logging facilities like [Grafana](https://grafana.com/) +or [DataDog](https://datadoghq.com/), as they support Unix epoch time-stamp formats as URL parameters: -- `${status.startedAtEpoch}`: Start timestamp of the workflow/pod, in the Unix epoch time format in **milliseconds**, e.g. `1609497000000`. -- `${status.finishedAtEpoch}`: End timestamp of the workflow/pod, in the Unix epoch time format in **milliseconds**, e.g. `1609497000000`. If the workflow/pod is still running, this variable will represent the currnet time. 
+* `${status.startedAtEpoch}`: Start time-stamp of the workflow/pod, in the Unix epoch time format in **milliseconds**, e.g. `1609497000000`. +* `${status.finishedAtEpoch}`: End time-stamp of the workflow/pod, in the Unix epoch time format in **milliseconds**, e.g. `1609497000000`. If the workflow/pod is still running, this variable will represent the current time. > v3.1 and after diff --git a/docs/managed-namespace.md b/docs/managed-namespace.md index 202bbca8e38b..6c892e0f0fbd 100644 --- a/docs/managed-namespace.md +++ b/docs/managed-namespace.md @@ -1,20 +1,18 @@ # Managed Namespace -![GA](assets/ga.svg) - > v2.5 and after You can install Argo in either cluster scoped or namespace scope configurations. This dictates if you must set-up cluster roles or normal roles. In namespace scope configuration, you must run both the Workflow Controller and -Argo Server using `--namespaced`. If you would like to have the workflows running in a separate -namespace, add `--managed-namespace` as well. (In cluster scope installation, don't include `--namespaced` +Argo Server using `--namespaced`. If you would like to have the workflows running in a separate +namespace, add `--managed-namespace` as well. (In cluster scope installation, don't include `--namespaced` or `--managed-namespace`.) 
For example: -``` +```yaml - args: - --configmap - workflow-controller-configmap @@ -25,4 +23,4 @@ For example: - default ``` -Please mind that both cluster scoped and namespace scoped configurations require "admin" role because some custom resource (CRD) must be created (and CRD is always a cluster level object) +Please mind that both cluster scoped and namespace scoped configurations require "admin" role because some custom resource (CRD) must be created (and CRD is always a cluster level object) diff --git a/docs/memoization.md b/docs/memoization.md index 81a473493d20..8945013433d1 100644 --- a/docs/memoization.md +++ b/docs/memoization.md @@ -4,44 +4,42 @@ ## Introduction -Workflows often have outputs that are expensive to compute. -This feature reduces cost and workflow execution time by memoizing previously run steps: +Workflows often have outputs that are expensive to compute. +This feature reduces cost and workflow execution time by memoizing previously run steps: it stores the outputs of a template into a specified cache with a variable key. ## Cache Method -Currently, caching can only be performed with ConfigMaps. +Currently, caching can only be performed with config-maps. This allows you to easily manipulate cache entries manually through `kubectl` and the Kubernetes API without having to go through Argo. -## Using Memoization +## Using Memoization -Memoization is set at the template level. You must specify a key, which can be static strings but more often depend on inputs. -You must also specify a name for the ConfigMap cache. +Memoization is set at the template level. You must specify a key, which can be static strings but more often depend on inputs. +You must also specify a name for the config-map cache. 
-``` +```yaml apiVersion: argoproj.io/v1alpha1 kind: Workflow metadata: - generateName: memoized-workflow- + generateName: memoized-workflow- spec: - entrypoint: whalesay - templates: - - name: whalesay - memoize: - key: "{{inputs.parameters.message}}" - cache: - configMap: - name: whalesay-cache - -... + entrypoint: whalesay + templates: + - name: whalesay + memoize: + key: "{{inputs.parameters.message}}" + cache: + configMap: + name: whalesay-cache ``` -!!! Note +!!! Note In order to use memoization it is necessary to add the verbs `create` and `update` to the `configmaps` resource for the appropriate (cluster) roles. In the case of a cluster install the `argo-cluster-role` cluster role should be updated, whilst for a namespace install the `argo-role` role should be updated. -## FAQs +## FAQ -1. If you see errors like `"error creating cache entry: ConfigMap \"reuse-task\" is invalid: []: Too long: must have at most 1048576 characters"`, +1. If you see errors like `error creating cache entry: ConfigMap \"reuse-task\" is invalid: []: Too long: must have at most 1048576 characters`, this is due to [the 1MB limit placed on the size of `ConfigMap`](https://github.com/kubernetes/kubernetes/issues/19781). Here are a couple of ways that might help resolve this: * Delete the existing `ConfigMap` cache or switch to use a different cache. diff --git a/docs/mentoring.md b/docs/mentoring.md index 37a7c0f78797..a8db6cb00fb7 100644 --- a/docs/mentoring.md +++ b/docs/mentoring.md @@ -9,7 +9,7 @@ Mentors will help you with things like: * Understand key concepts and learn your way around the source code * Getting your first pull request with a code contribution created, reviewed and merged. -Mentors and mentees must abide by the [code of conduct](https://github.com/cncf/foundation/blob/master/code-of-conduct.md). +Mentors and mentees must abide by the [code of conduct](https://github.com/cncf/foundation/blob/main/code-of-conduct.md). 
## How To Get A Mentor @@ -19,9 +19,9 @@ To set expectations: 1. Create [a mentoring request](https://github.com/argoproj/argo-workflows/issues/new?assignees=&labels=mentoring%2Ctriage&template=mentoring_request.md) with the title "I would like a mentor" and answer the questions in the template. 2. A mentor will comment offering to mentor you. It may take a few days for a mentor to respond. -3. You comment to accept or decline the offer. +3. You comment to accept or decline the offer. 4. If you accept, then your new mentor will assign the issue to themself. -5. The mentor may suggest resources to learn Golang or React, or discus the changes that'll fix your bug or enhancement needs. +5. The mentor may suggest resources to learn Golang or React, or discuss the changes that will fix your bug or enhancement needs. 6. Work together the create, approve, and merge your pull request. 7. Once the PR is closed, then the issues is closed. The relationship ends. Congratulations! @@ -57,4 +57,4 @@ For program specific questions, please refer to [GSoC FAQ](https://developers.go If you have any problems with your relationship, please contact us: * alex_collins@intuit.com -* terrytangyuan@gmail.com \ No newline at end of file +* terrytangyuan@gmail.com diff --git a/docs/metrics.md b/docs/metrics.md index a8817bdb1274..69c23d778a79 100644 --- a/docs/metrics.md +++ b/docs/metrics.md @@ -1,7 +1,5 @@ # Prometheus Metrics -![GA](assets/ga.svg) - > v2.7 and after ## Introduction @@ -9,7 +7,7 @@ Argo emits a certain number of controller metrics that inform on the state of the controller at any given time. Furthermore, users can also define their own custom metrics to inform on the state of their Workflows. -Custom prometheus metrics can be defined to be emitted on a `Workflow`- and `Template`-level basis. These can be useful +Custom Prometheus metrics can be defined to be emitted on a `Workflow`- and `Template`-level basis. 
These can be useful for many cases; some examples: - Keeping track of the duration of a `Workflow` or `Template` over time, and setting an alert if it goes beyond a threshold @@ -23,12 +21,13 @@ best way to define metrics in Argo to avoid problems such as [cardinality explos There are two kinds of metrics emitted by Argo: **controller metrics** and **custom metrics**. -#### Controller metrics +### Controller metrics + Metrics that inform on the state of the controller; i.e., they answer the question "What is the state of the controller right now?" Default controller metrics can be scraped from service ```workflow-controller-metrics``` at the endpoint ```:9090/metrics``` - -#### Custom metrics +### Custom metrics + Metrics that inform on the state of a Workflow, or a series of Workflows. These custom metrics are defined by the user in the Workflow spec. Emitting custom metrics is the responsibility of the emitter owner. Since the user defines Workflows in Argo, the user is responsible @@ -63,61 +62,61 @@ a way to view and analyze historical data, consider the [workflow archive](workf Metrics for the Four Golden Signals are: -* Latency: `argo_workflows_queue_latency` -* Traffic: `argo_workflows_count` and `argo_workflows_queue_depth_count` -* Errors: `argo_workflows_count` and `argo_workflows_error_count` -* Saturation: `argo_workflows_workers_busy` and `argo_workflows_workflow_condition` +- Latency: `argo_workflows_queue_latency` +- Traffic: `argo_workflows_count` and `argo_workflows_queue_depth_count` +- Errors: `argo_workflows_count` and `argo_workflows_error_count` +- Saturation: `argo_workflows_workers_busy` and `argo_workflows_workflow_condition` -#### argo_pod_missing +#### `argo_pod_missing` Pods were not seen. E.g. by being deleted by Kubernetes. You should only see this under high load. !!! NOTE This metric's name starts with `argo_` not `argo_workflows_`. -#### argo_workflows_count +#### `argo_workflows_count` Number of workflow in each phase. 
The `Running` count does not mean that a workflows pods are running, just that the controller has scheduled them. A workflow can be stuck in `Running` with pending pods for a long time. -#### argo_workflows_error_count +#### `argo_workflows_error_count` A count of certain errors incurred by the controller. -#### argo_workflows_k8s_request_total +#### `argo_workflows_k8s_request_total` Number of API requests sent to the Kubernetes API. -#### argo_workflows_operation_duration_seconds +#### `argo_workflows_operation_duration_seconds` A histogram of durations of operations. -#### argo_workflows_pods_count +#### `argo_workflows_pods_count` -It is possible for a workflow to start, but no pods be running (e.g. cluster is too busy to run them). This metric sheds light on actual work being done. +It is possible for a workflow to start, but no pods be running (e.g. cluster is too busy to run them). This metric sheds light on actual work being done. -#### argo_workflows_queue_adds_count +#### `argo_workflows_queue_adds_count` The number of additions to the queue of workflows or cron workflows. -#### argo_workflows_queue_depth_count +#### `argo_workflows_queue_depth_count` The depth of the queue of workflows or cron workflows to be processed by the controller. -#### argo_workflows_queue_latency +#### `argo_workflows_queue_latency` The time workflows or cron workflows spend in the queue waiting to be processed. -#### argo_workflows_workers_busy +#### `argo_workflows_workers_busy` The number of workers that are busy. -#### argo_workflows_workflow_condition +#### `argo_workflows_workflow_condition` The number of workflow with different conditions. This will tell you the number of workflows with running pods. -#### argo_workflows_workflows_processed_count +#### `argo_workflows_workflows_processed_count` A count of all Workflow updates processed by the controller. @@ -131,7 +130,7 @@ In order to analyze the behavior of a workflow over time, we need to be able to (i.e. 
individual executions) of a workflow together into a "series" for the purposes of emitting metrics. We do so by linking them together with the same metric descriptor. -In prometheus, a metric descriptor is defined as a metric's name and its key-value labels. For example, for a metric +In Prometheus, a metric descriptor is defined as a metric's name and its key-value labels. For example, for a metric tracking the duration of model execution over time, a metric descriptor could be: `argo_workflows_model_exec_time{model_name="model_a",phase="validation"}` @@ -155,7 +154,7 @@ Please see the [Argo Workflows metrics](https://grafana.com/grafana/dashboards/1 ## Defining metrics Metrics are defined in-place on the Workflow/Step/Task where they are emitted from. Metrics are always processed _after_ -the Workflow/Step/Task completes, with the exception of [realtime metrics](#realtime-metrics). +the Workflow/Step/Task completes, with the exception of [real-time metrics](#real-time-metrics). Metric definitions **must** include a `name` and a `help` doc string. They can also include any number of `labels` (when defining labels avoid cardinality explosion). Metrics with the same `name` **must always** use the same exact `help` string, @@ -279,16 +278,15 @@ Finally, an example of a `Template`-level Histogram metric that tracks an intern ... ``` -### Realtime metrics +### Real-Time Metrics -Argo supports a limited number of real-time metrics. These metrics are emitted in realtime, beginning when the step execution starts -and ending when it completes. Realtime metrics are only available on Gauge type metrics and with a [limited number of variables](variables.md#realtime-metrics). +Argo supports a limited number of real-time metrics. These metrics are emitted in real-time, beginning when the step execution starts +and ending when it completes. Real-time metrics are only available on Gauge type metrics and with a [limited number of variables](variables.md#realtime-metrics). 
-To define a realtime metric simply add `realtime: true` to a gauge metric with a valid realtime variable. For example: +To define a real-time metric simply add `realtime: true` to a gauge metric with a valid real-time variable. For example: ```yaml gauge: realtime: true value: "{{duration}}" ``` - diff --git a/docs/node-field-selector.md b/docs/node-field-selector.md index 086f5cf04e2c..b1f11296b978 100644 --- a/docs/node-field-selector.md +++ b/docs/node-field-selector.md @@ -1,12 +1,10 @@ # Node Field Selectors -![GA](assets/ga.svg) - > v2.8 and after ## Introduction -The resume, stop and retry Argo CLI and API commands support a `--node-field-selector` parameter to allow the user to select a subset of nodes for the command to apply to. +The resume, stop and retry Argo CLI and API commands support a `--node-field-selector` parameter to allow the user to select a subset of nodes for the command to apply to. In the case of the resume and stop commands these are the nodes that should be resumed or stopped. @@ -14,7 +12,9 @@ In the case of the retry command it allows specifying nodes that should be resta The format of this when used with the CLI is: -```--node-field-selector=FIELD=VALUE``` +```bash +--node-field-selector=FIELD=VALUE +``` ## Possible options @@ -25,26 +25,30 @@ The field can be any of: | `displayName`| Display name of the node. This is the name of the node as it is displayed on the CLI or UI, without considering its ancestors (see example below). This is a useful shortcut if there is only one node with the same `displayName` | | `name`| Full name of the node. This is the full name of the node, including its ancestors (see example below). Using `name` is necessary when two or more nodes share the same `displayName` and disambiguation is required. 
| | `templateName`| Template name of the node | -| `phase`| Phase status of the node - eg Running | -| `templateRef.name`| The name of the WorkflowTemplate the node is referring to | -| `templateRef.template`| The template within the WorkflowTemplate the node is referring to | +| `phase`| Phase status of the node - e.g. Running | +| `templateRef.name`| The name of the workflow template the node is referring to | +| `templateRef.template`| The template within the workflow template the node is referring to | | `inputs.parameters..value`| The value of input parameter NAME | -The operator can be '=' or '!='. Multiple selectors can be combined with a comma, in which case they are ANDed together. +The operator can be '=' or '!='. Multiple selectors can be combined with a comma, in which case they are anded together. ## Examples To filter for nodes where the input parameter 'foo' is equal to 'bar': -```--node-field-selector=inputs.parameters.foo.value=bar``` +```bash +--node-field-selector=inputs.parameters.foo.value=bar +``` To filter for nodes where the input parameter 'foo' is equal to 'bar' and phase is not running: -```--node-field-selector=foo1=bar1,phase!=Running``` +```bash +--node-field-selector=foo1=bar1,phase!=Running +``` Consider the following workflow: -``` +```text ● appr-promotion-ffsv4 code-release ├─✔ start sample-template/email appr-promotion-ffsv4-3704914002 2s ├─● app1 wftempl1/approval-and-promotion @@ -63,7 +67,7 @@ Here we have two steps with the same `displayName`: `wait-approval`. To select o `name`, either `appr-promotion-ffsv4.app1.wait-approval` or `appr-promotion-ffsv4.app3.wait-approval`. If it is not clear what the full name of a node is, it can be found using `kubectl`: -``` +```bash $ kubectl get wf appr-promotion-ffsv4 -o yaml ... 
diff --git a/docs/offloading-large-workflows.md b/docs/offloading-large-workflows.md index 0180e4ee68cf..2111be6711b6 100644 --- a/docs/offloading-large-workflows.md +++ b/docs/offloading-large-workflows.md @@ -1,30 +1,28 @@ # Offloading Large Workflows -![GA](assets/ga.svg) - > v2.4 and after -Argo stores workflows as Kubernetes resources (i.e. within EtcD). This creates a limit to their size as resources must be under 1MB. Each resource includes the status of each node, which is stored in the `/status/nodes` field for the resource. This can be over 1MB. If this happens, we try and compress the node status and store it in `/status/compressedNodes`. If the status is still too large, we then try and store it in an SQL database. +Argo stores workflows as Kubernetes resources (i.e. within EtcD). This creates a limit to their size as resources must be under 1MB. Each resource includes the status of each node, which is stored in the `/status/nodes` field for the resource. This can be over 1MB. If this happens, we try and compress the node status and store it in `/status/compressedNodes`. If the status is still too large, we then try and store it in an SQL database. To enable this feature, configure a Postgres or MySQL database under `persistence` in [your configuration](workflow-controller-configmap.yaml) and set `nodeStatusOffLoad: true`. ## FAQ -#### Why aren't my workflows appearing in the database? +### Why aren't my workflows appearing in the database? Offloading is expensive and often unnecessary, so we only offload when we need to. Your workflows aren't probably large enough. -#### Error "Failed to submit workflow: etcdserver: request is too large." +### Error `Failed to submit workflow: etcdserver: request is too large.` You must use the Argo CLI having exported `export ARGO_SERVER=...`. 
-#### Error "offload node status is not supported" +### Error `offload node status is not supported` Even after compressing node statuses, the workflow exceeded the EtcD size limit. To resolve, either enable node status offload as described above or look for ways to reduce the size of your workflow manifest: -- Use `withItems` or `withParams` to consolidate similar templates into a single parameterized template +- Use `withItems` or `withParams` to consolidate similar templates into a single parametrized template - Use [template defaults](https://argoproj.github.io/argo-workflows/template-defaults/) to factor shared template options to the workflow level - Use [workflow templates](https://argoproj.github.io/argo-workflows/workflow-templates/) to factor frequently-used templates into separate resources - Use [workflows of workflows](https://argoproj.github.io/argo-workflows/workflow-of-workflows/) to factor a large workflow into a workflow of smaller workflows diff --git a/docs/overrides/main.html b/docs/overrides/main.html new file mode 100644 index 000000000000..ecea080f7e92 --- /dev/null +++ b/docs/overrides/main.html @@ -0,0 +1,24 @@ +{% extends "base.html" %} +{% block content %} +{{ super() }} + + +

{{ lang.t("meta.comments") }}

+ + + +{% endblock %} \ No newline at end of file diff --git a/docs/plugin-directory.md b/docs/plugin-directory.md index dbc184a96e93..67a4a0c32e96 100644 --- a/docs/plugin-directory.md +++ b/docs/plugin-directory.md @@ -11,3 +11,4 @@ | [Python](https://github.com/argoproj-labs/argo-workflows-python-executor-plugin) | Plugin for executing Python | | [Hermes](https://github.com/kjagiello/hermes) | Send notifications, e.g. Slack | | [WASM](https://github.com/Shark/wasm-workflows-plugin) | Run Web Assembly (WASM) tasks | +| [Chaos Mesh Plugin](https://github.com/xlgao-zju/argo-chaos-mesh-plugin) | Run Chaos Mesh experiment | diff --git a/docs/progress.md b/docs/progress.md index f94cbb81a71d..449e6aa7f75f 100644 --- a/docs/progress.md +++ b/docs/progress.md @@ -4,37 +4,38 @@ When you run a workflow, the controller will report on its progress. -We define progress as two numbers, `N/M` such that `0 <= N <= M and 0 <= M`. +We define progress as two numbers, `N/M` such that `0 <= N <= M and 0 <= M`. * `N` is the number of completed tasks. * `M` is the total number of tasks. E.g. `0/0`, `0/1` or `50/100`. -Unlike [estimated duration](estimated-duration.md), progress is deterministic. I.e. it will be the same for each workflow, regardless of any problems. +Unlike [estimated duration](estimated-duration.md), progress is deterministic. I.e. it will be the same for each workflow, regardless of any problems. Progress for each node is calculated as follows: -2. For a pod node either `1/1` if completed or `0/1` otherwise. -3. For non-leaf nodes, the sum of its children. +1. For a pod node either `1/1` if completed or `0/1` otherwise. +2. For non-leaf nodes, the sum of its children. For a whole workflow's, progress is the sum of all its leaf nodes. - -!!! Warning + +!!! Warning `M` will increase during workflow run each time a node is added to the graph. 
## Self reporting progress > v3.3 and after -Pods in a workflow can report their own progress during their runtime. This self reported progress overrides the -auto-generated progress. +Pods in a workflow can report their own progress during their runtime. This self reported progress overrides the +auto-generated progress. Reporting progress works as follows: -- create and write the progress to a file indicated by the env variable `ARGO_PROGRESS_FILE` -- format of the progress must be `N/M` -The executor will read this file every 3s and if there was an update, +* create and write the progress to a file indicated by the env variable `ARGO_PROGRESS_FILE` +* format of the progress must be `N/M` + +The executor will read this file every 3s and if there was an update, patch the pod annotations with `workflows.argoproj.io/progress: N/M`. The controller picks this up and writes the progress to the appropriate Status properties. diff --git a/docs/proposals/artifact-gc-proposal.md b/docs/proposals/artifact-gc-proposal.md new file mode 100644 index 000000000000..62c16acd33a2 --- /dev/null +++ b/docs/proposals/artifact-gc-proposal.md @@ -0,0 +1,88 @@ +# Proposal for Artifact Garbage Collection + +## Introduction + +The motivation for this is to enable users to automatically have certain Artifacts specified to be automatically garbage collected. + +Artifacts can be specified for Garbage Collection at different stages: `OnWorkflowCompletion`, `OnWorkflowDeletion`, `OnWorkflowSuccess`, `OnWorkflowFailure`, or `Never` + +## Proposal Specifics + +### Workflow Spec changes + +1. `WorkflowSpec` has an `ArtifactGC` structure, which consists of an `ArtifactGCStrategy`, as well as the optional designation of a `ServiceAccount` and Pod metadata (labels and annotations) to be used by the Pod doing the deletion. The `ArtifactGCStrategy` can be set to `OnWorkflowCompletion`, `OnWorkflowDeletion`, `OnWorkflowSuccess`, `OnWorkflowFailure`, or `Never` +2. 
Artifact has an `ArtifactGC` section which can be used to override the Workflow level. + +### Workflow Status changes + +1. Artifact has a boolean `Deleted` flag +2. `WorkflowStatus.Conditions` can be set to `ArtifactGCError` +3. `WorkflowStatus` can include a new field `ArtGCStatus` which holds additional information to keep track of the state of Artifact Garbage Collection. + +### How it will work + +For each `ArtifactGCStrategy` the Controller will execute one Pod that runs in the user's namespace and deletes all artifacts pertaining to that strategy. + +![Option 2 Flow](../assets/artifact-gc-option-2-flow.jpg) + +Since `OnWorkflowSuccess` happens at the same time as `OnWorkflowCompletion` and `OnWorkflowFailure` also happens at the same time as `OnWorkflowCompletion`, we can consider consolidating these GC Strategies together. + +We will have a new CRD type called `ArtifactGCTask` and use one or more of them to specify the Artifacts which the GC Pod will read and then write Status to (note individual artifacts have individual statuses). The Controller will read the Status and reflect that in the Workflow Status. The Controller will deem the `ArtifactGCTasks` ready to read once the Pod has completed (in success or failure). + +Once the GC Pod has completed and the Workflow status has been persisted, assuming the Pod completed with Success, the Controller can delete the `ArtifactGCTasks`, which will cause the GC Pod to also get deleted as it will be "owned" by the `ArtifactGCTasks`. + +The Workflow will have a Finalizer on it to prevent it from being deleted until Artifact GC has occurred. Once all deletions for all GC Strategies have occurred, the Controller will remove the Finalizer. + +### Failures + +If a deletion fails, the Pod will retry a few times through exponential back off. Note: it will not be considered a failure if the key does not exist - the principle of idempotence will allow this (i.e. 
if a Pod were to get evicted and then re-run it should be okay if some artifacts were previously deleted). + +Once it retries a few times, if it didn't succeed, it will end in a "Failed" state. The user will manually need to delete the `ArtifactGCTasks` (which will delete the GC Pod), and remove the Finalizer on the Workflow. + +The Failure will be reflected in both the Workflow Conditions as well as a Kubernetes Event (and the Artifacts that failed will have "Deleted"=false). + +### Alternatives Considered + +For reference, these [slides](../assets/artifact-gc-proposal.pptx) were presented to the Argo Contributor meeting on 7/12/22 which go through some of the alternative options that were weighed. These alternatives are explained below: + +#### One Pod Per Artifact + +The [POC](https://github.com/argoproj/argo-workflows/pull/8530) that was done, which uses just one Pod to delete each Artifact, was considered as an alternative for MVP (Option 1 from the slides). + +This option has these benefits: + +- simpler in that the Pod doesn't require any additional Object to report status (e.g. `ArtifactGCTask`) because it simply succeeds or fails based on its exit code (whereas in Option 2 the Pod needs to report individual failure statuses for each artifact) +- could have a very minimal Service Account which provides access to just that one artifact's location + +and these drawbacks: + +- deletion is slower when performed by multiple Pods +- a Workflow with thousands of artifacts causes thousands of Pods to get executed, which could overwhelm kube-scheduler and kube-apiserver. 
+- if we delay the Artifact GC Pods by giving them a lower priority than the Workflow Pods, users will not get their artifacts deleted when they expect and may log bugs + +Summarizing ADR statement: +"In the context of Artifact Garbage Collection, facing whether to use a separate Pod for every artifact or not, we decided not to, to achieve faster garbage collection and reduced load on K8S, accepting that we will require a new CRD type." + +#### Service Account/IAM roles + +We considered some alternatives for how to specify Service Account and/or Annotations, which are applied to give the GC Pod access (slide 12). We will have them specify this information in a new `ArtifactGC` section of the spec that lives on the Workflow level but can be overridden on the Artifact level (option 3 from slide). Another option considered was to just allow specification on the Workflow level (option 2 from slide) so as to reduce the complexity of the code and reduce the potential number of Pods running, but Option 3 was selected in the end to maximize flexibility. + +Summarizing ADR statement: +"In the context of Artifact Garbage Collection, facing the question of how users should specify Service Account and annotations, we decided to give them the option to specify them on the Workflow level and/or override them on the Artifact level, to maximize flexibility for user needs, accepting that the code will be more complicated, and sometimes there will be many Pods running." + +### MVP vs post-MVP + +We will start with just S3. + +We can also make other determinations if it makes sense to postpone some parts for after MVP. + +### Workflow Spec Validation + +We can reject the Workflow during validation if `ArtifactGC` is configured along with a non-supported storage engine (for now probably anything besides S3). + +### Documentation + +Need to clarify certain things in our documentation: + +1. 
Users need to know that if they don't name their artifacts with unique keys, they risk the same key being deleted by one Workflow and created by another at the same time. One recommendation is to parametrize the key, e.g. `{{workflow.uid}}/hello.txt`. +2. Requirement to specify Service Account or Annotation for `ArtifactGC` specifically if they are needed (we won't fall back to default Workflow SA/annotations). Also, the Service Account needs to either be bound to the "agent" role or otherwise allow the same access to `ArtifactGCTasks`. diff --git a/docs/public-api.md b/docs/public-api.md index dc4956ec233a..c7b1b63d74dc 100644 --- a/docs/public-api.md +++ b/docs/public-api.md @@ -5,7 +5,3 @@ Argo Workflows public API is defined by the following: * The file `api/openapi-spec/swagger.json` * The schema of the table `argo_archived_workflows`. * The installation options listed in `manifests/README.md`. - -See: - -* [Versioning](versioning.md) \ No newline at end of file diff --git a/docs/quick-start.md b/docs/quick-start.md index 59e78a71aef1..43c6d7ccffc0 100644 --- a/docs/quick-start.md +++ b/docs/quick-start.md @@ -1,67 +1,105 @@ # Quick Start -To see how Argo Workflows work, you can install it and run examples of simple workflows and workflows that use artifacts. +To see how Argo Workflows work, you can install it and run examples of simple workflows. -Firstly, you'll need a Kubernetes cluster and `kubectl` set-up +Before you start you need a Kubernetes cluster and `kubectl` set up to be able to access that cluster. For the purposes of getting up and running, a local cluster is fine. You could consider the following local Kubernetes cluster options: + +* [minikube](https://minikube.sigs.k8s.io/docs/) +* [kind](https://kind.sigs.k8s.io/) +* [k3s](https://k3s.io/) or [k3d](https://k3d.io/) +* [Docker Desktop](https://www.docker.com/products/docker-desktop/) + +⚠️ These instructions are intended to help you get started quickly. They are not suitable in production. 
For production installs, please refer to [the installation documentation](installation.md) ⚠️ ## Install Argo Workflows -To get started quickly, you can use the quick start manifest which will install Argo Workflow as well as some commonly used components: +To install Argo Workflows, navigate to the [releases page](https://github.com/argoproj/argo-workflows/releases/latest) and find the release you wish to use (the latest full release is preferred). + +Scroll down to the `Controller and Server` section and execute the `kubectl` commands. -!!! note - These manifests are intended to help you get started quickly. They are not suitable in production, on test environments, or any environment containing any real data. They contain hard-coded passwords that are publicly available. +Below is an example of the install commands, ensure that you update the command to install the correct version number: -```sh -kubectl create ns argo -kubectl apply -n argo -f https://raw.githubusercontent.com/argoproj/argo-workflows/master/manifests/quick-start-postgres.yaml +```yaml +kubectl create namespace argo +kubectl apply -n argo -f https://github.com/argoproj/argo-workflows/releases/download/v<>/install.yaml ``` -!!! note - On GKE, you may need to grant your account the ability to create new `clusterrole`s +### Patch argo-server authentication + +The argo-server (and thus the UI) defaults to client authentication, which requires clients to provide their Kubernetes bearer token in order to authenticate. For more information, refer to the [Argo Server Auth Mode documentation](argo-server-auth-mode.md). 
We will switch the authentication mode to `server` so that we can bypass the UI login for now: + +```bash +kubectl patch deployment \ + argo-server \ + --namespace argo \ + --type='json' \ + -p='[{"op": "replace", "path": "/spec/template/spec/containers/0/args", "value": [ + "server", + "--auth-mode=server" +]}]' -```sh -kubectl create clusterrolebinding YOURNAME-cluster-admin-binding --clusterrole=cluster-admin --user=YOUREMAIL@gmail.com ``` -!!! note - To run Argo on GKE Autopilot, you must use the `emissary` executor or the `k8sapi` executor. Find more information on our [executors doc](workflow-executors.md). +### Port-forward the UI -If you are running Argo Workflows locally (e.g. using Minikube or Docker for Desktop), open a port-forward so you can access the namespace: +Open a port-forward so you can access the UI: -```sh +```bash kubectl -n argo port-forward deployment/argo-server 2746:2746 ``` -This will serve the user interface on https://localhost:2746 +This will serve the UI on . Due to the self-signed certificate, you will receive a TLS error which you will need to manually approve. -If you're using running Argo Workflows on a remote cluster (e.g. on EKS or GKE) then [follow these instructions](argo-server.md#access-the-argo-workflows-ui). +## Install the Argo Workflows CLI -Next, Download the latest Argo CLI from our [releases page](https://github.com/argoproj/argo-workflows/releases/latest). +Next, Download the latest Argo CLI from the same [releases page](https://github.com/argoproj/argo-workflows/releases/latest). -Finally, submit an example workflow: +## Submitting an example workflow -`argo submit -n argo --watch https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/hello-world.yaml` +### Submit an example workflow (CLI) -The `--watch` flag used above will allow you to observe the workflow as it runs and the status of whether it succeeds. 
+```bash +argo submit -n argo --watch https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/hello-world.yaml +``` + +The `--watch` flag used above will allow you to observe the workflow as it runs and the status of whether it succeeds. When the workflow completes, the watch on the workflow will stop. You can list all the Workflows you have submitted by running the command below: -`argo list -n argo` +```bash +argo list -n argo +``` -You will notice the Workflow name has a `hello-world-` prefix followed by random characters. These characters are used -to give Workflows unique names to help identify specific runs of a Workflow. If you submitted this Workflow again, +You will notice the Workflow name has a `hello-world-` prefix followed by random characters. These characters are used +to give Workflows unique names to help identify specific runs of a Workflow. If you submitted this Workflow again, the next Workflow run would have a different name. -Using the `argo get` command, you can always review details of a Workflow run. The output for the command below will +Using the `argo get` command, you can always review details of a Workflow run. The output for the command below will be the same as the information shown as when you submitted the Workflow: -`argo get -n argo @latest` +```bash +argo get -n argo @latest +``` -The `@latest` argument to the CLI is a short cut to view the latest Workflow run that was executed. +The `@latest` argument to the CLI is a short cut to view the latest Workflow run that was executed. You can also observe the logs of the Workflow run by running the following: -`argo logs -n argo @latest` +```bash +argo logs -n argo @latest +``` + +### Submit an example workflow (GUI) + +* Open a port-forward so you can access the UI: + +```bash +kubectl -n argo port-forward deployment/argo-server 2746:2746 +``` + +* Navigate your browser to . 
+ +* Click `+ Submit New Workflow` and then `Edit using full workflow options` -Now that you have understanding of using Workflows, you can check out other [Workflow examples](https://github.com/argoproj/argo-workflows/blob/master/examples/README.md) to see additional uses of Worklows. +* You can find an example workflow already in the text field. Press `+ Create` to start the workflow. diff --git a/docs/releases.md b/docs/releases.md index 3c3ec7b00e8d..b60b93457a7c 100644 --- a/docs/releases.md +++ b/docs/releases.md @@ -1,29 +1,37 @@ # Releases -## Supported Versions +You can find the most recent version under [Github release](https://github.com/argoproj/argo-workflows/releases). + +## Versioning -Versions are expressed as x.y.z, where x is the major version, y is the minor version, and z is the patch version, +Versions are expressed as `x.y.z`, where `x` is the major version, `y` is the minor version, and `z` is the patch version, following Semantic Versioning terminology. +Argo Workflows does not use Semantic Versioning. Minor versions may contain breaking changes. Patch versions only +contain bug fixes and minor features. + +For **stable**, use the latest patch version. + +⚠️ Read the [upgrading guide](upgrading.md) to find out about breaking changes before any upgrade. + +## Supported Versions + We maintain release branches for the most recent two minor releases. -Fixes may be backported to release branches, depending on severity, risk, and, feasibility. +Fixes may be back-ported to release branches, depending on severity, risk, and feasibility. -If a release contains breaking changes, or CVE fixes, this will documented in the release notes. +If a release contains breaking changes, or CVE fixes, this will be documented in the [upgrading guide](upgrading.md). ## Supported Version Skew Both the `argo-server` and `argocli` should be the same version as the controller. -# Release Cycle - -For **stable**, use the latest patch version. 
-For **unstable**, we build and tag `latest` images for every commit to master. +## Release Cycle New minor versions are released roughly every 3 months. Release candidates for each major release are typically available for 4-6 weeks before the release becomes generally available. -Otherwise, we typically release weekly: +Otherwise, we typically release every two weeks: -* Patch fixes for the current stable version. +* Patch fixes for the current stable version. * The next release candidate, if we are currently in a release-cycle. diff --git a/docs/releasing.md b/docs/releasing.md index 60f3ca3f36d4..736d2cecd586 100644 --- a/docs/releasing.md +++ b/docs/releasing.md @@ -1,225 +1,39 @@ # Release Instructions -## Release +## Cherry-Picking Fixes -### 1. Cherry-pick Issue +✋ Before you start, make sure the release branch is passing CI. -Create a cherry-pick issue to allow the team and community to comment on the release contents. +Get a list of commits you may want to cherry-pick: -1. Locate the previous cherry-pick issue -2. Get the hash of the most recent commit still available on the previous issue -3. Generate new issue contents: - - ```sh - $ git checkout master # Ensure we are on master - $ git log --pretty=format:"%an: %s %h" [COMMIT_HASH]..HEAD - ``` -4. Create a new issue on GitHub with the title `[VERSION] cherry-pick` (e.g. `v3.0.2 cherry-pick`) and the generated commits -as content. - -### 2. Cherry-pick to Release Branch - -Once the team and community is satisfied with the commits to be cherry-picked, cherry-pick them into the appropriate -release branch. There should be a single release branch per minor release (e.g. `release-3.0`, `release-3.1`, etc.) - -1. Checkout the release branch and cherry-pick commits - - ```sh - $ git checkout relesae-3.0 - $ git cherry-pick [COMMIT_IDS...] - ``` - -2. Hope for few merge conflicts! 
- - A merge conflict during cherry-picking usually means the commit is based on another commit that should be - cherry-picked first. In case of a merge conflict, you can undo the cherry-picking by `git cherry-pick --abort` and - revisit the list of commits to make sure the prior commits are cherry-picked as well. - -3. Once done cherry-picking, push the release branch to ensure the branch can build and all tests pass. - -### 3. Prepare the Release - -> v2 - -`v2` releases still depend on the previous repository name (`github.com/argoproj/argo`). To release for `v2`, -make a local clone of the repository under the name `argo`: - -```shell -$ pwd -/Users//go/src/github.com/argoproj/argo-workflows -$ cd .. -$ cp -r argo-workflows argo -$ cd argo -``` - -Then follow all the normal steps. You should delete the `argo` folder once the release is done to avoid confusion and conflicts. - -#### Preparing the release - -> Before v3.1 - -1. Releasing requires a clean tree state, so back-up any untracked files in your Git directory. - - **Only once your files are backed up**, run: - ```shell - $ git clean -fdx # WARNING: Will delete untracked files! - ``` - -2. To generate new manifests and perform basic checks: - - ```shell - $ make prepare-release -B VERSION=v3.0.3 - ``` - -3. Once done, push the release branch and ensure the branch is green and all tests pass. - - ```shell - $ git push - ``` - -4. Publish the images and local Git changes (disabling K3D as this is faster and more reliable for releases): - - ```shell - $ make publish-release K3D=false VERSION=v3.0.3 - ``` - -5. Wait 1h to 2h. - -> v3.1 and after - -Create and push a release tag: - -``` -git tag v3.1.0 -git push origin v3.1.0 -``` - -The release will then be done automatically by a Github action. - -### 4. Ensure the Release Succeeded - -> Before v3.1 - -1. Check the images were pushed successfully. Ensure the `GitTreeState` is `Clean`. 
- ```sh - $ docker run argoproj/argoexec:v3.0.3 version - $ docker run argoproj/workflow-controller:v3.0.3 version - $ docker run argoproj/argocli:v3.0.3 version - ``` - -1. Check the correct versions are printed. Ensure the `GitTreeState` is `Clean`. - - ```sh - $ ./dist/argo-darwin-amd64 version - ``` - -1. Check the manifests contain the correct tags (search for `v3.0.3`): [https://raw.githubusercontent.com/argoproj/argo-workflows/v3.0.3/manifests/install.yaml](https://raw.githubusercontent.com/argoproj/argo-workflows/v3.0.3/manifests/install.yaml) - -1. Check the manifests apply: `kubectl -n argo apply -f https://raw.githubusercontent.com/argoproj/argo-workflows/v3.0.3/manifests/install.yaml` - -> v3.1 and after - -No action needed. - -### 5. Release Notes - -In [upgrading](upgrading.md), detail: - -* All breaking changes are listed with migration steps -* The release notes identify every publicly known vulnerability with a CVE assignment - -The change log is automatically generated by a Github action. - -> Before v3.1 - -The release title should be the version number (e.g. `v3.0.3`) and nothing else. - -Use hack/release-notes.md as the template for your new release notes. - -> v3.1 and after - -This is done automatically by a Github action. - -### 6. Upload Binaries and SHA256 Sums To GitHub - -> Before v3.1 - -After running `make publish-release`, you will have the zipped binaries and SHA256 sums in your local. - -Open them with: - -```shell -$ open dist +```bash +./hack/what-to-cherry-pick.sh release-3.3 ``` -Upload only the zipped binaries (`.gz` suffix) and SHA256 sums (`.sha256` suffix) to GitHub. There should be 12 uploaded files in total. - -> v3.1 and after +Ignore: -This is done automatically by a Github action. +* Fixes for features only on master. +* Dependency upgrades, unless it fixes a known security issue. -### 7. Update Stable Tag +Cherry-pick the first commit. Run `make test` locally before pushing. 
If the build times out, the build caches may have +gone, try re-running. -> Before v3.1 +Don't cherry-pick another commit until the CI passes. It is harder to find the cause of a new failed build if the last +build failed too. -If this is GA: - -Update the `stable` tag - -``` -git tag -f stable -git push -f origin stable -``` +Cherry-picking commits one-by-one and then waiting for the CI will take a long time. Instead, cherry-pick each commit then +run `make test` locally before pushing. -Check the manifests contain the correct tags: [https://raw.githubusercontent.com/argoproj/argo-workflows/stable/manifests/install.yaml](https://raw.githubusercontent.com/argoproj/argo-workflows/stable/manifests/install.yaml) +## Publish Release -> v3.1 and after +✋ Before you start, make sure the branch is passing CI. -Delete the `stable` tag. - -``` -git tag -D stable -git push origin :stable -``` +Push a new tag to the release branch. E.g.: -### 8. Update Homebrew - -If this is GA: - -Update the Homebrew formula. +```bash -export HOMEBREW_GITHUB_API_TOKEN=$GITHUB_TOKEN -brew bump-formula-pr argo --version 3.0.3 -``` - -Check that Homebrew was successfully updated after the PR was merged: - - ``` - brew upgrade argo - /usr/local/bin/argo version - ``` - -### 9. Update Java SDK - -If this is GA: - -Update the Java SDK formula. - +git tag v3.3.4 +git push upstream v3.3.4 ;# or origin if you do not use upstream ``` -git clone git@github.com:argoproj-labs/argo-client-java.git -cd argo-client-java -make publish VERSION=v3.0.3 -``` - -Check package published: [https://github.com/argoproj-labs/argo-client-java/packages](https://github.com/argoproj-labs/argo-client-java/packages) - -### 10. Publish Release - -> Before v3.1 - -Finally, press publish on the GitHub release. Congrats, you're done! - -> v3.1 and after -This is done automatically by a Github action. +Github Actions will automatically build and publish your release. This takes about 1h. 
Set yourself a reminder to check +this was successful. diff --git a/docs/resource-duration.md b/docs/resource-duration.md index 221aa2553ddc..8578526bebf2 100644 --- a/docs/resource-duration.md +++ b/docs/resource-duration.md @@ -1,17 +1,15 @@ # Resource Duration -![GA](assets/ga.svg) - > v2.7 and after -Argo Workflows provides an indication of how much resource your workflow has used and saves this +Argo Workflows provides an indication of how much resource your workflow has used and saves this information. This is intended to be an **indicative but not accurate** value. ## Calculation -The calculation is always an estimate, and is calculated by [duration.go](https://github.com/argoproj/argo-workflows/blob/master/util/resource/duration.go) -based on container duration, specified pod resource requests, limits, or (for memory and CPU) -defaults. +The calculation is always an estimate, and is calculated by [`duration.go`](https://github.com/argoproj/argo-workflows/blob/master/util/resource/duration.go) +based on container duration, specified pod resource requests, limits, or (for memory and CPU) +defaults. Each indicator is divided by a common denominator depending on resource type. @@ -19,32 +17,32 @@ Each indicator is divided by a common denominator depending on resource type. Each resource type has a denominator used to make large values smaller. - * CPU: `1` - * Memory: `1Gi` - * Storage: `10Gi` - * Ephemeral Storage: `10Gi` - * All others: `1` +* CPU: `1` +* Memory: `1Gi` +* Storage: `10Gi` +* Ephemeral Storage: `10Gi` +* All others: `1` -The requested fraction of the base amount will be multiplied by the container's run time to get -the container's Resource Duration. +The requested fraction of the base amount will be multiplied by the container's run time to get +the container's Resource Duration. 
-For example, if you've requested `100Mi` of memory (one tenth of the base amount), and the container -runs 120sec, then the reported Resource Duration will be `12sec * (1Gi memory)`. +For example, if you've requested `100Mi` of memory (one tenth of the base amount), and the container +runs 120sec, then the reported Resource Duration will be `12sec * (1Gi memory)`. ### Request Defaults If `requests` are not set for a container, Kubernetes defaults to `limits`. If `limits` are not set, -Argo falls back to `100m` for CPU and `100Mi` for memory. +Argo falls back to `100m` for CPU and `100Mi` for memory. -**Note:** these are Argo's defaults, _not_ Kubernetes' defaults. For the most meaningful results, +**Note:** these are Argo's defaults, _not_ Kubernetes' defaults. For the most meaningful results, set `requests` and/or `limits` for all containers. ### Example -A pod that runs for 3min, with a CPU limit of `2000m`, no memory request and an `nvidia.com/gpu` +A pod that runs for 3min, with a CPU limit of `2000m`, no memory request and an `nvidia.com/gpu` resource limit of `1`: -``` +```text CPU: 3min * 2000m / 1000m = 6min * (1 cpu) Memory: 3min * 100Mi / 1Gi = 18sec * (100Mi memory) GPU: 3min * 1 / 1 = 2min * (1 nvidia.com/gpu) @@ -55,10 +53,10 @@ GPU: 3min * 1 / 1 = 2min * (1 nvidia.com/gpu) Both the web and CLI give abbreviated usage, like `9m10s*cpu,6s*memory,2m31s*nvidia.com/gpu`. In this context, resources like `memory` refer to the "base amounts". -For example, `memory` means "amount of time a resource requested 1Gi of memory." If a container only +For example, `memory` means "amount of time a resource requested 1Gi of memory." If a container only uses 100Mi, each second it runs will only count as a tenth-second of `memory`. ## Rounding Down -For short running pods (<10s), the memory value may be 0s. This is because the default is `100Mi`, -but the denominator is `1Gi`. +For short running pods (<10s), the memory value may be 0s. 
This is because the default is `100Mi`, +but the denominator is `1Gi`. diff --git a/docs/rest-api.md b/docs/rest-api.md index 0276dd7faa37..09eab21ee0cc 100644 --- a/docs/rest-api.md +++ b/docs/rest-api.md @@ -2,15 +2,13 @@ ## Argo Server API -![GA](assets/ga.svg) - > v2.5 and after -Argo Workflows ships with a server that provide more features and security than before. +Argo Workflows ships with a server that provides more features and security than before. -The server can be configured with or without client auth (`server --auth-mode client`). When it is disabled, then clients must pass their Kubeconfig base 64 encoded in the HTTP `Authorization` header: +The server can be configured with or without client auth (`server --auth-mode client`). When it is disabled, then clients must pass their KUBECONFIG base 64 encoded in the HTTP `Authorization` header: -``` +```bash ARGO_TOKEN=$(argo auth token) curl -H "Authorization: $ARGO_TOKEN" https://localhost:2746/api/v1/workflows/argo ``` @@ -18,7 +16,6 @@ curl -H "Authorization: $ARGO_TOKEN" https://localhost:2746/api/v1/workflows/arg * Learn more on [how to generate an access token](access-token.md). API reference docs : - + * [Latest docs](swagger.md) (maybe incorrect) * Interactively in the [Argo Server UI](https://localhost:2746/apidocs). (>= v2.10) - diff --git a/docs/rest-examples.md b/docs/rest-examples.md index 0af3c1dad6b2..01e05376dd87 100644 --- a/docs/rest-examples.md +++ b/docs/rest-examples.md @@ -1,25 +1,25 @@ # API Examples -Document contains couple of examples of workflow JSON's to submit via argo-server REST API. +Document contains couple of examples of workflow JSON's to submit via argo-server REST API. 
> v2.5 and after Assuming * the namespace of argo-server is argo -* authentication is turned off (otherwise provide Authentication header) +* authentication is turned off (otherwise provide Authorization header) * argo-server is available on localhost:2746 ## Submitting workflow -``` +```bash curl --request POST \ --url https://localhost:2746/api/v1/workflows/argo \ --header 'content-type: application/json' \ --data '{ "namespace": "argo", "serverDryRun": false, - "workflow": { + "workflow": { "metadata": { "generateName": "hello-world-", "namespace": "argo", @@ -57,21 +57,21 @@ curl --request POST \ ## Getting workflows for namespace argo -``` +```bash curl --request GET \ --url https://localhost:2746/api/v1/workflows/argo ``` ## Getting single workflow for namespace argo -``` +```bash curl --request GET \ --url https://localhost:2746/api/v1/workflows/argo/abc-dthgt ``` ## Deleting single workflow for namespace argo -``` +```bash curl --request DELETE \ --url https://localhost:2746/api/v1/workflows/argo/abc-dthgt ``` diff --git a/docs/resuming-workflow-via-automation.md b/docs/resuming-workflow-via-automation.md deleted file mode 100644 index f293debebd1c..000000000000 --- a/docs/resuming-workflow-via-automation.md +++ /dev/null @@ -1,38 +0,0 @@ -# Resume A Workflow - -For automation, we want just the name of the workflow, we can use labels to get just this our suspended workflow: - -```sh -WF=$(argo list -l workflows.argoproj.io/workflow-template=wait --running -o name) -``` - -```sh -WF=$(curl $ARGO_SERVER/api/v1/workflows/argo?listOptions.labelSelector=workflows.argoproj.io/workflow-template=wait,\!workflows.argoproj.io/completed \ - -fs \ - -H "Authorization: $ARGO_TOKEN" | - jq -r '.items[0].metadata.name') -``` - -You can resume the workflow via the CLI or API too. If you have more than one node waiting, you must target it using a [node field selector](node-field-selector.md). 
- -````sh -argo resume $WF --node-field-selector displayName=a -```` - -```sh -curl $ARGO_SERVER/api/v1/workflows/argo/$WF/resume \ - -fs \ - -X 'PUT' \ - -H "Authorization: $ARGO_TOKEN" \ - -d '{"nodeFieldSelector": "displayName=a"}' -``` - -Now the workflow will have resumed and completed. - -See also: - -* [access token](access-token.md) -* [resuming a workflow via automation](resuming-workflow-via-automation.md) -* [submitting a workflow via automation](submit-workflow-via-automation.md) -* [one workflow submitting another](workflow-submitting-workflow.md) -* [async pattern](async-pattern.md) diff --git a/docs/retries.md b/docs/retries.md index f8a8b325e6a7..e0618a68d950 100644 --- a/docs/retries.md +++ b/docs/retries.md @@ -2,7 +2,7 @@ Argo Workflows offers a range of options for retrying failed steps. -## Configuring `retryStrategy` in WorkflowSpec +## Configuring `retryStrategy` in `WorkflowSpec` ```yaml apiVersion: argoproj.io/v1alpha1 @@ -26,10 +26,10 @@ spec: Use `retryPolicy` to choose which failures to retry: -- Always: Retry all failed steps -- OnFailure: Retry steps whose main container is marked as failed in Kubernetes -- OnError: Retry steps that encounter Argo controller errors, or whose init or wait containers fail -- OnTransientError: Retry steps that encounter errors [defined as transient](https://github.com/argoproj/argo-workflows/blob/master/util/errors/errors.go), or errors matching the TRANSIENT_ERROR_PATTERN [environment variable](https://argoproj.github.io/argo-workflows/environment-variables/). Available in version 3.0 and later. 
+- `Always`: Retry all failed steps +- `OnFailure`: Retry steps whose main container is marked as failed in Kubernetes (this is the default) +- `OnError`: Retry steps that encounter Argo controller errors, or whose init or wait containers fail +- `OnTransientError`: Retry steps that encounter errors [defined as transient](https://github.com/argoproj/argo-workflows/blob/master/util/errors/errors.go), or errors matching the `TRANSIENT_ERROR_PATTERN` [environment variable](https://argoproj.github.io/argo-workflows/environment-variables/). Available in version 3.0 and later. For example: @@ -68,6 +68,6 @@ If `expression` evaluates to false, the step will not be retried. See [example](https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/retry-conditional.yaml) for usage. -## Backoff +## Back-Off You can configure the delay between retries with `backoff`. See [example](https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/retry-backoff.yaml) for usage. diff --git a/docs/roadmap.md b/docs/roadmap.md index 1968ab23d69b..95d511a9661f 100644 --- a/docs/roadmap.md +++ b/docs/roadmap.md @@ -1,3 +1,3 @@ # Roadmap -[Open the roadmap ](https://docs.google.com/document/d/1TzhgIPHnlUI9tVqcjoZVmvjuPAIZf5AyygGqL98BBaI/edit?usp=sharing) \ No newline at end of file +[Open the roadmap](https://docs.google.com/document/d/1TzhgIPHnlUI9tVqcjoZVmvjuPAIZf5AyygGqL98BBaI/edit?usp=sharing) diff --git a/docs/running-at-massive-scale.md b/docs/running-at-massive-scale.md index 6bdf4861d358..bfb6df8278eb 100644 --- a/docs/running-at-massive-scale.md +++ b/docs/running-at-massive-scale.md @@ -32,7 +32,5 @@ Where Argo has a lot of work to do, the Kubernetes API can be overwhelmed. There ## Overwhelmed Database If you're running workflows with many nodes, you'll probably be offloading data to a database. Offloaded data is kept -for 5m. You can reduce the number of records create by setting `DEFAULT_REQUEUE_TIME=1m`. 
This will slow reconciliation, +for 5m. You can reduce the number of records created by setting `DEFAULT_REQUEUE_TIME=1m`. This will slow reconciliation, but will suit workflows where nodes run for over 1m. - - diff --git a/docs/running-locally.md b/docs/running-locally.md index 441f5b826957..822c50031cd2 100644 --- a/docs/running-locally.md +++ b/docs/running-locally.md @@ -1,18 +1,55 @@ # Running Locally +You have two options: + +1. If you're using VSCode, you use the [Dev-Container](#development-container). This takes about 7 minutes. +1. Install the [requirements](#requirements) on your computer manually. This takes about 1 hour. + +## Git Clone + +Clone the Git repo into: `$(GOPATH)/src/github.com/argoproj/argo-workflows`. Any other path will mean the code +generation does not work. + +## Development Container + +A development container is a running Docker container with a well-defined tool/runtime stack and its prerequisites. +[The Visual Studio Code Remote - Containers](https://code.visualstudio.com/docs/remote/containers) extension lets you use a Docker container as a full-featured development environment. + +System requirements can be found [here](https://code.visualstudio.com/docs/remote/containers#_system-requirements) + +Note: + +* `GOPATH` must be `$HOME/go`. +* for **Apple Silicon** + * This platform can spend 3 times the indicated time + * Configure Docker Desktop to use BuildKit: + + ```json + "features": { + "buildkit": true + }, + ``` + +* For **Windows WSL2** + * Configure [`.wslconfig`](https://docs.microsoft.com/en-us/windows/wsl/wsl-config#configuration-setting-for-wslconfig) to limit memory usage by the WSL2 to prevent VSCode OOM. 
+ +* For **Linux** + * Use [Docker Desktop](https://docs.docker.com/desktop/linux/install/) instead of [Docker Engine](https://docs.docker.com/engine/install/) to prevent incorrect network configuration by k3d + ## Requirements -* [Go 1.17](https://golang.org/dl/) +* [Go 1.18](https://golang.org/dl/) * [Yarn](https://classic.yarnpkg.com/en/docs/install/#mac-stable) * [Docker](https://docs.docker.com/get-docker/) -* [protoc](http://google.github.io/proto-lens/installing-protoc.html) -* [jq](https://stedolan.github.io/jq/download/) -* A local Kubernetes cluster (`k3d`, `kind`, or `minikube`) +* [`protoc`](http://google.github.io/proto-lens/installing-protoc.html) +* [`jq`](https://stedolan.github.io/jq/download/) +* [`node` >= 16](https://nodejs.org/download/release/latest-v16.x/) for running the UI +* A local Kubernetes cluster ([`k3d`](https://k3d.io/), [`kind`](https://kind.sigs.k8s.io/docs/user/quick-start/#installation), or [`minikube`](https://minikube.sigs.k8s.io/docs/start/)) We recommend using [K3D](https://k3d.io/) to set up the local Kubernetes cluster since this will allow you to test RBAC set-up and is fast. You can set-up K3D to be part of your default kube config as follows: -```shell +```bash k3d cluster start --wait ``` @@ -20,105 +57,187 @@ Alternatively, you can use [Minikube](https://github.com/kubernetes/minikube) to Once a local Kubernetes cluster has started via `minikube start`, your kube config will use Minikube's context automatically. -## Developing locally +⚠️ Do not use Docker for Desktop with its embedded Kubernetes, it does not support Kubernetes RBAC (i.e. `kubectl auth can-i` always +returns `allowed`). -!!! 
Warning - The git repo must be checked out into: `$(GOPATH)/src/github.com/argoproj/argo-workflows` +## Developing locally Add the following to your `/etc/hosts`: -``` +```text 127.0.0.1 dex 127.0.0.1 minio 127.0.0.1 postgres 127.0.0.1 mysql +127.0.0.1 azurite ``` -To run the controller and argo-server API locally, with MinIO inside the "argo" namespace of your cluster: +To start: + +* The controller, so you can run workflows. +* MinIO (, use admin/password) so you can use artifacts: -```shell +Run: + +```bash +make start +``` + +Make sure you don't see any errors in your terminal. This runs the Workflow Controller locally on your machine (not in Docker/Kubernetes). + +You can submit a workflow for testing using `kubectl`: + +```bash +kubectl create -f examples/hello-world.yaml +``` + +We recommend running `make clean` before `make start` to ensure recompilation. + +If you made changes to the executor, you need to build the image: + +```bash +make argoexec-image +``` + +To also start the API on : + +```bash make start API=true ``` - -To start the UI, use `UI=true`: -```shell -make start API=true UI=true +This runs the Argo Server (in addition to the Workflow Controller) locally on your machine. + +To also start the UI on (`UI=true` implies `API=true`): + +```bash +make start UI=true ``` -To test the workflow archive, use `PROFILE=mysql`: +![diagram](assets/make-start-UI-true.png) + +If you are making change to the CLI (i.e. Argo Server), you can build it separately if you want: -```shell -make start API=true UI=true PROFILE=mysql +```bash +make cli +./dist/argo submit examples/hello-world.yaml ;# new CLI is created as `./dist/argo` ``` - -To test SSO integration, use `PROFILE=sso`: -```shell -make start API=true UI=true PROFILE=sso +Although, note that this will be built automatically if you do: `make start API=true`. 
+ +To test the workflow archive, use `PROFILE=mysql` or `PROFILE=postgres`: + +```bash +make start PROFILE=mysql ``` -You’ll now have: +You'll have, either: -* Argo UI on http://localhost:8080 -* Argo Server API on https://localhost:2746 -* MinIO on http://localhost:9000 (use admin/password) -* Postgres on http://localhost:5432, run `make postgres-cli` to access. -* MySQL on http://localhost:3306, run `make mysql-cli` to access. +* Postgres on , run `make postgres-cli` to access. +* MySQL on , run `make mysql-cli` to access. -Before submitting/running workflows, build the executor images with this command: +To test SSO integration, use `PROFILE=sso`: -```shell -make argoexec-image +```bash +make start UI=true PROFILE=sso ``` ### Running E2E tests locally -1. Configure your IDE to set the `KUBECONFIG` environment variable to your k3d kubeconfig file -2. Find an e2e test that you want to run in `test/e2e` -3. Determine which profile the e2e test is using by inspecting the go build flag at the top of the file and referring to [ci-build.yaml](https://github.com/argoproj/argo-workflows/blob/master/.github/workflows/ci-build.yaml) +Start up Argo Workflows using the following: + +```bash +make start PROFILE=mysql AUTH_MODE=client STATIC_FILES=false API=true +``` - For example `TestArchiveStrategies` in `test/e2e/functional_test.go` has the following build flags +If you want to run Azure tests against a local Azurite, add `AZURE=true`: - ```go - //go:build functional - // +build functional - ``` +```bash +make start PROFILE=mysql AUTH_MODE=client STATIC_FILES=false API=true AZURE=true +``` - In [ci-build.yaml](https://github.com/argoproj/argo-workflows/blob/master/.github/workflows/ci-build.yaml) the functional test suite is using the `minimal` profile +#### Running One Test -4. Run the profile in a terminal window +In most cases, you want to run the test that relates to your changes locally. You should not run all the tests suites. 
+Our CI will run those concurrently when you create a PR, which will give you feedback much faster. - ```shell - make start PROFILE=minimal E2E_EXECUTOR=emissary AUTH_MODE=client STATIC_FILES=false LOG_LEVEL=info API=true UI=false - ``` +Find the test that you want to run in `test/e2e` + +```bash +make TestArtifactServer +``` + +If you wish to include tests against Azure Storage, define `AZURE=true`: + +```bash +make AZURE=true TestArtifactServer +``` + +#### Running A Set Of Tests -5. Run the test in your IDE +You can find the build tag at the top of the test file. + +```go +//go:build api +``` + +You need to run `make test-{buildTag}`, so for `api` that would be: + +```bash +make test-api +``` + +#### Diagnosing Test Failure + +Tests often fail: that's good. To diagnose failure: + +* Run `kubectl get pods`, are pods in the state you expect? +* Run `kubectl get wf`, is your workflow in the state you expect? +* What do the pod logs say? I.e. `kubectl logs`. +* Check the controller and argo-server logs. These are printed to the console you ran `make start` in. Is anything + logged at `level=error`? + +If tests run slowly or time out, factory reset your Kubernetes cluster. ## Committing Before you commit code and raise a PR, always run: -```shell +```bash make pre-commit -B ``` -Please adhere to the following when creating your commits: +Please do the following when creating your PR: * Sign-off your commits. * Use [Conventional Commit messages](https://www.conventionalcommits.org/en/v1.0.0/). * Suffix the issue number. -Example: +Examples: -```shell +```bash git commit --signoff -m 'fix: Fixed broken thing. Fixes #1234' ``` -Troubleshooting: +```bash +git commit --signoff -m 'feat: Added a new feature. 
Fixes #1234' +``` + +## Troubleshooting * When running `make pre-commit -B`, if you encounter errors like - `make: *** [pkg/apiclient/clusterworkflowtemplate/cluster-workflow-template.swagger.json] Error 1`, - ensure that you have checked out your code into `$(GOPATH)/src/github.com/argoproj/argo-workflows`. + `make: *** [pkg/apiclient/clusterworkflowtemplate/cluster-workflow-template.swagger.json] Error 1`, ensure that you + have checked out your code into `$(GOPATH)/src/github.com/argoproj/argo-workflows`. * If you encounter "out of heap" issues when building UI through Docker, please validate resources allocated to Docker. Compilation may fail if allocated RAM is less than 4Gi. + +## Using Multiple Terminals + +I run the controller in one terminal, and the UI in another. I like the UI: it is much faster to debug workflows than +the terminal. This allows you to make changes to the controller and re-start it, without restarting the UI (which I +think takes too long to start-up). + +As a convenience, `CTRL=false` implies `UI=true`, so just run: + +```bash +make start CTRL=false +``` diff --git a/docs/scaling.md b/docs/scaling.md index 2c026bda66b4..c81b1382504a 100644 --- a/docs/scaling.md +++ b/docs/scaling.md @@ -10,7 +10,7 @@ You cannot horizontally scale the controller. You can scale the controller vertically: -- If you have many workflows, increase `--workflow-workers` and `--workflow-ttl-workers`. +- If you have many workflows, increase `--workflow-workers` and `--workflow-ttl-workers`. - Increase both `--qps` and `--burst`. You will need to increase the controller's memory and CPU. @@ -23,13 +23,13 @@ Rather than running a single installation in your cluster, run one per namespace ### Instance ID -Within a cluster can use instance ID to run N Argo instances within a cluster. +Within a cluster can use instance ID to run N Argo instances within a cluster. Create one namespace for each Argo, e.g. `argo-i1`, `argo-i2`:. 
Edit [workflow-controller-configmap.yaml](workflow-controller-configmap.yaml) for each namespace to set an instance ID. -``` +```yaml apiVersion: v1 kind: ConfigMap metadata: @@ -42,7 +42,7 @@ data: You may need to pass the instance ID to the CLI: -``` +```bash argo --instanceid i1 submit my-wf.yaml ``` diff --git a/docs/security.md b/docs/security.md index 9a9571ace27b..a0b691065fb3 100644 --- a/docs/security.md +++ b/docs/security.md @@ -1,6 +1,8 @@ # Security -See [SECURITY.md](https://github.com/argoproj/argo-workflows/blob/master/SECURITY.md). +[To report security issues](https://github.com/argoproj/argo-workflows/blob/master/SECURITY.md). + +💡 Read [Practical Argo Workflows Hardening](https://blog.argoproj.io/practical-argo-workflows-hardening-dd8429acc1ce). ## Workflow Controller Security @@ -14,7 +16,7 @@ The controller has permission (via Kubernetes RBAC + its config map) with either * Create/get/delete pods, PVCs, and PDBs. * List/get template, config maps, service accounts, and secrets. -See [workflow-controller-clusterrole.yaml](https://raw.githubusercontent.com/argoproj/argo-workflows/master/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrole.yaml) or [workflow-controller-role.yaml](https://raw.githubusercontent.com/argoproj/argo-workflows/master/manifests/namespace-install/workflow-controller-rbac/workflow-controller-role.yaml) +See [workflow controller cluster-role](https://raw.githubusercontent.com/argoproj/argo-workflows/master/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrole.yaml) or [workflow-controller-role.yaml](https://raw.githubusercontent.com/argoproj/argo-workflows/master/manifests/namespace-install/workflow-controller-rbac/workflow-controller-role.yaml) ### User Permissions @@ -22,12 +24,12 @@ Users minimally need permission to create/read workflows. 
The controller will th A way to think of this is that, if the user has permission to create a workflow in a namespace, then it is OK to create pods or anything else for them in that namespace. -If the user only has permission to create workflows, then they will be typically unable to configure other necessary resources such as config maps, or view the outcome of their workflow. This is useful when the user is a service. +If the user only has permission to create workflows, then they will be typically unable to configure other necessary resources such as config maps, or view the outcome of their workflow. This is useful when the user is a service. !!! Warning If you allow users to create workflows in the controller's namespace (typically `argo`), it may be possible for users to modify the controller itself. In a namespace-install the managed namespace should therefore not be the controller's namespace. -You can typically further restrict what a user can do to just being able to submit workflows from templates using [the workflow requriments feature](workflow-restrictions.md). +You can typically further restrict what a user can do to just being able to submit workflows from templates using [the workflow requirements feature](workflow-restrictions.md). ### Workflow Pod Permissions @@ -42,11 +44,11 @@ This service account typically needs [permissions](workflow-rbac.md). Different service accounts should be used if a workflow pod needs to have elevated permissions, e.g. to create other resources. -The main container will have the service account token mounted , allowing the main container to patch pods (amongst other permissions). Set `automountServiceAccountToken` to false to prevent this. See [fields](fields.md). +The main container will have the service account token mounted , allowing the main container to patch pods (among other permissions). Set `automountServiceAccountToken` to false to prevent this. See [fields](fields.md). 
By default, workflows pods run as `root`. To further secure workflow pods, set the [workflow pod security context](workflow-pod-security-context.md). -You should configure the controller with the correct [workflow executor](workflow-executors.md) for your trade off between security and scalabily. +You should configure the controller with the correct [workflow executor](workflow-executors.md) for your trade off between security and scalability. These settings can be set by default using [workflow defaults](default-workflow-specs.md). @@ -66,17 +68,17 @@ You can achieve this by configuring the `argo-server` role ([example](https://gi ## Network Security -Argo Workflows requires various levels of network access depending on configuration and the features enabled. The following describes the different workflow components and their network access needs, to help provide guidance on how to configure the argo namespace in a secure manner (e.g. NetworkPolicies). +Argo Workflows requires various levels of network access depending on configuration and the features enabled. The following describes the different workflow components and their network access needs, to help provide guidance on how to configure the argo namespace in a secure manner (e.g. `NetworkPolicy`). ### Argo Server -The argo server is commonly exposed to end-users to provide users with a user interface for visualizing and managing their workflows. It must also be exposed if leveraging [webhooks](webhooks.md) to trigger workflows. Both of these use cases require that the argo-server Service to be exposed for ingress traffic (e.g. with an Ingress object or load balancer). Note that the Argo UI is also available to be accessed by running the server locally (i.e. `argo server`) using local kubeconfig credentials, and visiting the UI over https://localhost:2746. +The Argo Server is commonly exposed to end-users to provide users with a UI for visualizing and managing their workflows. 
It must also be exposed if leveraging [webhooks](webhooks.md) to trigger workflows. Both of these use cases require that the argo-server Service to be exposed for ingress traffic (e.g. with an Ingress object or load balancer). Note that the Argo UI is also available to be accessed by running the server locally (i.e. `argo server`) using local KUBECONFIG credentials, and visiting the UI over . -The argo server additionally has a feature to allow downloading of artifacts through the user interface. This feature requires that the argo-server be given egress access to the underlying artifact provider (e.g. S3, GCS, MinIO, Artifactory) in order to download and stream the artifact. +The Argo Server additionally has a feature to allow downloading of artifacts through the UI. This feature requires that the argo-server be given egress access to the underlying artifact provider (e.g. S3, GCS, MinIO, Artifactory, Azure Blob Storage) in order to download and stream the artifact. ### Workflow Controller -The workflow-controller Deployment exposes a Prometheus metrics endpoint (workflow-controller-metrics:9090) so that a Prometheus server can periodically scrape for controller level metrics. Since prometheus is typically running in a separate namespace, the argo namespace should be configured to allow cross-namespace ingress access to the workflow-controller-metrics Service. +The workflow-controller Deployment exposes a Prometheus metrics endpoint (workflow-controller-metrics:9090) so that a Prometheus server can periodically scrape for controller level metrics. Since Prometheus is typically running in a separate namespace, the argo namespace should be configured to allow cross-namespace ingress access to the workflow-controller-metrics Service. 
### Database access diff --git a/docs/service-accounts.md b/docs/service-accounts.md index aa05ca540a7a..4b44c1b55c40 100644 --- a/docs/service-accounts.md +++ b/docs/service-accounts.md @@ -2,7 +2,7 @@ ## Configure the service account to run Workflows -### Roles, RoleBindings, and ServiceAccounts +### Roles, Role-Bindings, and Service Accounts In order for Argo to support features such as artifacts, outputs, access to secrets, etc. it needs to communicate with Kubernetes resources using the Kubernetes API. To communicate with the Kubernetes API, Argo uses a `ServiceAccount` to authenticate itself to the Kubernetes API. @@ -10,7 +10,7 @@ You can specify which `Role` (i.e. which permissions) the `ServiceAccount` that Then, when submitting Workflows you can specify which `ServiceAccount` Argo uses using: -```sh +```bash argo submit --serviceaccount ``` @@ -22,10 +22,9 @@ For more information about granting Argo the necessary permissions for your use For the purposes of this demo, we will grant the `default` `ServiceAccount` admin privileges (i.e., we will bind the `admin` `Role` to the `default` `ServiceAccount` of the current namespace): -```sh +```bash kubectl create rolebinding default-admin --clusterrole=admin --serviceaccount=argo:default -n argo ``` **Note that this will grant admin privileges to the `default` `ServiceAccount` in the namespace that the command is run from, so you will only be able to run Workflows in the namespace where the `RoleBinding` was made.** - diff --git a/docs/sidecar-injection.md b/docs/sidecar-injection.md index 9180734fe945..470c48f6d7db 100644 --- a/docs/sidecar-injection.md +++ b/docs/sidecar-injection.md @@ -11,8 +11,7 @@ You will minimize problems by not using Istio with Argo Workflows. See [#1282](https://github.com/argoproj/argo-workflows/issues/1282). 
- -### Support Matrix +## Support Matrix Key: @@ -20,13 +19,13 @@ Key: * Any - we can kill any image * KubectlExec - we kill images by running `kubectl exec` -| Executor | Sidecar | Injected Sidecar | -|---|---|---| -| `docker` | Any | Unsupported | -| `emissary` | Any | KubectlExec | -| `k8sapi` | Shell | KubectlExec | -| `kubelet` | Shell | KubectlExec | -| `pns` | Any | Any | +| Executor | Sidecar | Injected Sidecar | +|---|---|---| +| `docker` | Any | Unsupported | +| `emissary` | Any | KubectlExec | +| `k8sapi` | Shell | KubectlExec | +| `kubelet` | Shell | KubectlExec | +| `pns` | Any | Any | ## How We Kill Sidecars Using `kubectl exec` @@ -35,7 +34,7 @@ Key: Kubernetes does not provide a way to kill a single container. You can delete a pod, but this kills all containers, and loses all information and logs of that pod. -Instead, try to mimic the Kubernetes termination behaviour, which is: +Instead, try to mimic the Kubernetes termination behavior, which is: 1. SIGTERM PID 1 1. Wait for the pod's `terminateGracePeriodSeconds` (30s by default). diff --git a/docs/static-code-analysis.md b/docs/static-code-analysis.md index 45593ccf205f..9c3c8b5cb800 100644 --- a/docs/static-code-analysis.md +++ b/docs/static-code-analysis.md @@ -2,7 +2,7 @@ We use the following static code analysis tools: -* golangci-lint and tslint for compile time linting -* [snyk.io](https://app.snyk.io/org/argoproj/projects) - for image scanning +* `golangci-lint` and `tslint` for compile time linting. +* [Snyk](https://app.snyk.io/org/argoproj/projects) for image scanning. These are at least run daily or on each pull request. diff --git a/docs/stress-testing.md b/docs/stress-testing.md index ae98b75f5dc6..65706296990a 100644 --- a/docs/stress-testing.md +++ b/docs/stress-testing.md @@ -27,12 +27,12 @@ argo submit examples/hello-world.yaml --watch Checks -* Open http://localhost:2746/workflows and check it loads and that you can run a workflow. 
-* Open http://localhost:9090/metrics and check you can see the Prometheus metrics. -* Open http://localhost:9091/graph and check you can see a Prometheus graph. You can +* Open and check it loads and that you can run a workflow. +* Open and check you can see the Prometheus metrics. +* Open and check you can see a Prometheus graph. You can use [this Tab Auto Refresh Chrome extension](https://chrome.google.com/webstore/detail/tab-auto-refresh/oomoeacogjkolheacgdkkkhbjipaomkn) to auto-refresh the page. -* Open http://localhost:6060/debug/pprof and check you can access pprof. +* Open and check you can access `pprof`. Run `go run ./test/stress/tool -n 10000` to run a large number of workflows. @@ -55,4 +55,4 @@ go tool pprof -png http://localhost:6060/debug/pprof/profile ```bash gcloud container clusters delete argo-workflows-stress-1 -``` \ No newline at end of file +``` diff --git a/docs/submit-workflow-via-automation.md b/docs/submit-workflow-via-automation.md deleted file mode 100644 index cb0a2773cda1..000000000000 --- a/docs/submit-workflow-via-automation.md +++ /dev/null @@ -1,68 +0,0 @@ -# Submitting A Workflow Via Automation - -![GA](assets/ga.svg) - -> v2.8 and after - -You may want to consider using [events](events.md) or [webhooks](webhooks.md) instead. - -Firstly, to do any automation, you'll need an ([access token](access-token.md)). 
For this example, our role needs extra permissions: - -```sh -kubectl patch role jenkins -p '{"rules": [{"apiGroups": ["argoproj.io"], "resources": ["workflowtemplates"], "verbs": ["get"]}, {"apiGroups": ["argoproj.io"], "resources": ["workflows"], "verbs": ["create", "list", "get", "update"]}]}' -``` - -Next, create a workflow template - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: WorkflowTemplate -metadata: - name: hello-argo -spec: - entrypoint: main - templates: - - name: main - steps: - - - name: a - template: whalesay - - name: whalesay - container: - image: docker/whalesay:latest -``` - -You can submit this workflow via an CLI or the [Argo Server API](rest-api.md). - -Submit via CLI (note how I add a label to help identify it later on): - -````sh -argo submit --from wftmpl/hello-argo -l workflows.argoproj.io/workflow-template=hello-argo -```` - -Or submit via API: - -```sh -curl $ARGO_SERVER/api/v1/workflows/argo/submit \ - -fs \ - -H "Authorization: $ARGO_TOKEN" \ - -d '{"resourceKind": "WorkflowTemplate", "resourceName": "hello-argo", "submitOptions": {"labels": "workflows.argoproj.io/workflow-template=hello-argo"}}' -``` - -You'll see that the workflow has been created: - -```sh -argo list -NAME STATUS AGE DURATION PRIORITY -hello-argo-77m4l Running 33s 33s 0 -``` - -See also: - -See also: - -* [access token](access-token.md) -* [events](events.md) -* [webhooks](webhooks.md) -* [resuming a workflow via automation](resuming-workflow-via-automation.md) -* [one workflow submitting another](workflow-submitting-workflow.md) -* [async pattern](async-pattern.md) diff --git a/docs/survey-data-privacy.md b/docs/survey-data-privacy.md index 5d6345299ed4..fbeeb3f140ec 100644 --- a/docs/survey-data-privacy.md +++ b/docs/survey-data-privacy.md @@ -1,3 +1,3 @@ # Survey Data Privacy -[Privacy policy](https://www.linuxfoundation.org/privacy-policy/) \ No newline at end of file +[Privacy policy](https://www.linuxfoundation.org/privacy-policy/) diff --git 
a/docs/swagger.md b/docs/swagger.md index 143f6cdb00b1..c6a8cbfd5f47 100644 --- a/docs/swagger.md +++ b/docs/swagger.md @@ -1,3 +1,3 @@ -# Argo Server API +# API Reference [Open the Swagger API docs](https://raw.githubusercontent.com/argoproj/argo-workflows/master/api/openapi-spec/swagger.json). diff --git a/docs/synchronization.md b/docs/synchronization.md index f3e47ed7ad36..3eedd463a767 100644 --- a/docs/synchronization.md +++ b/docs/synchronization.md @@ -1,19 +1,19 @@ # Synchronization -![GA](assets/ga.svg) - > v2.10 and after ## Introduction -Synchronization enables users to limit the parallel execution of certain workflows or + +Synchronization enables users to limit the parallel execution of certain workflows or templates within a workflow without having to restrict others. -Users can create multiple synchronization configurations in the `ConfigMap` that can be referred to +Users can create multiple synchronization configurations in the `ConfigMap` that can be referred to from a workflow or template within a workflow. Alternatively, users can configure a mutex to prevent concurrent execution of templates or workflows using the same mutex. For example: + ```yaml apiVersion: v1 kind: ConfigMap @@ -21,13 +21,14 @@ metadata: name: my-config data: workflow: "1" # Only one workflow can run at given time in particular namespace - template: "2" # Two instance of template can run at a given time in particular namespace + template: "2" # Two instances of template can run at a given time in particular namespace ``` ### Workflow-level Synchronization -Workflow-level synchronization limits parallel execution of the workflow if workflow have same synchronization reference. -In this example, Workflow refers `workflow` synchronization key which is configured as rate limit 1, -so only one workflow instance will be executed at given time even multiple workflows created. 
+ +Workflow-level synchronization limits parallel execution of the workflow if workflows have the same synchronization reference. +In this example, Workflow refers to `workflow` synchronization key which is configured as limit 1, +so only one workflow instance will be executed at a given time even if multiple workflows are created. Using a semaphore configured by a `ConfigMap`: @@ -72,9 +73,10 @@ spec: ``` ### Template-level Synchronization -Template-level synchronization limits parallel execution of the template across workflows, if template have same synchronization reference. -In this example, `acquire-lock` template has synchronization reference of `template` key which is configured as rate limit 2, -so, two instance of templates will be executed at given time even multiple step/task with in workflow or different workflow refers same template. + +Template-level synchronization limits parallel execution of the template across workflows, if templates have the same synchronization reference. +In this example, `acquire-lock` template has synchronization reference of `template` key which is configured as limit 2, +so two instances of templates will be executed at a given time: even multiple steps/tasks within a workflow or different workflows referring to the same template. Using a semaphore configured by a `ConfigMap`: @@ -139,14 +141,14 @@ spec: ``` Examples: + 1. [Workflow level semaphore](https://github.com/argoproj/argo-workflows/blob/master/examples/synchronization-wf-level.yaml) 1. [Workflow level mutex](https://github.com/argoproj/argo-workflows/blob/master/examples/synchronization-mutex-wf-level.yaml) 1. [Step level semaphore](https://github.com/argoproj/argo-workflows/blob/master/examples/synchronization-tmpl-level.yaml) 1.
[Step level mutex](https://github.com/argoproj/argo-workflows/blob/master/examples/synchronization-mutex-tmpl-level.yaml) -### Other Parallelism support: -In addition to this synchronization, the workflow controller supports a parallelism setting that applies to all workflows -in the system (it is not granular to a class of workflows, or tasks withing them). Furthermore, there is a parallelism setting -at the workflow and template level, but this only restricts total concurrent executions of tasks within the same workflow. - +### Other Parallelism support +In addition to this synchronization, the workflow controller supports a parallelism setting that applies to all workflows +in the system (it is not granular to a class of workflows, or tasks within them). Furthermore, there is a parallelism setting +at the workflow and template level, but this only restricts total concurrent executions of tasks within the same workflow. diff --git a/docs/template-defaults.md b/docs/template-defaults.md index 1ead1c37525e..16d4fe3f6f50 100644 --- a/docs/template-defaults.md +++ b/docs/template-defaults.md @@ -1,13 +1,15 @@ # Template Defaults + > v3.1 and after ## Introduction `TemplateDefaults` feature enables the user to configure the default template values in workflow spec level that will apply to all the templates in the workflow. If the template has a value that also has a default value in `templateDefault`, the Template's value will take precedence. These values will be applied during the runtime. Template values and default values are merged using Kubernetes strategic merge patch. To check whether and how list values are merged, inspect the `patchStrategy` and `patchMergeKey` tags in the [workflow definition](https://github.com/argoproj/argo-workflows/blob/master/pkg/apis/workflow/v1alpha1/workflow_types.go).
-## Configuring `templateDefaults` in WorkflowSpec +## Configuring `templateDefaults` in `WorkflowSpec` For example: + ```yaml apiVersion: argoproj.io/v1alpha1 kind: Workflow @@ -24,10 +26,12 @@ spec: container: image: docker/whalesay:latest ``` + [template defaults example](https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/template-defaults.yaml) ## Configuring `templateDefaults` in Controller Level - +Operator can configure the `templateDefaults` in [workflow defaults](default-workflow-specs.md). This `templateDefault` will be applied to all the workflows which run on the controller. The following would be specified in the Config Map: diff --git a/docs/tls.md b/docs/tls.md index 283417aa51c8..1b5ea436acf5 100644 --- a/docs/tls.md +++ b/docs/tls.md @@ -1,13 +1,11 @@ # Transport Layer Security -![GA](assets/ga.svg) - > v2.8 and after If you're running Argo Server you have three options with increasing transport security (note - you should also be running [authentication](argo-server.md#auth-mode)): -### Default configuration: +## Default configuration > v2.8 - 2.12 @@ -17,32 +15,32 @@ Defaults to [Plain Text](#plain-text) Defaults to [Encrypted](#encrypted) if cert is available -Argo image/deployment defaults to [Encrypted](#encrypted) with a self-signed certificate expires after 365 days. +Argo image/deployment defaults to [Encrypted](#encrypted) with a self-signed certificate which expires after 365 days. ## Plain Text -*Recommended for: dev* +Recommended for: development. Everything is sent in plain text.
-Start Argo Server with the --secure=false (or ARGO_SECURE=false) flag, e.g.: +Start Argo Server with the --secure=false (or `ARGO_SECURE=false`) flag, e.g.: -``` +```bash export ARGO_SECURE=false -argo --secure=false +argo server --secure=false ``` To secure the UI you may front it with a HTTPS proxy. ## Encrypted -*Recommended for: development and test environments* +Recommended for: development and test environments. You can encrypt connections without any real effort. Start Argo Server with the `--secure` flag, e.g.: -``` +```bash argo server --secure ``` @@ -50,11 +48,11 @@ It will start with a self-signed certificate that expires after 365 days. Run the CLI with `--secure` (or `ARGO_SECURE=true`) and `--insecure-skip-verify` (or `ARGO_INSECURE_SKIP_VERIFY=true`). -``` +```bash argo --secure --insecure-skip-verify list ``` -``` +```bash export ARGO_SECURE=true export ARGO_INSECURE_SKIP_VERIFY=true argo --secure --insecure-skip-verify list @@ -63,7 +61,7 @@ argo --secure --insecure-skip-verify list Tip: Don't forget to update your readiness probe to use HTTPS. To do so, edit your `argo-server` Deployment's `readinessProbe` spec: -``` +```yaml readinessProbe: httpGet: scheme: HTTPS @@ -71,14 +69,14 @@ readinessProbe: ### Encrypted and Verified -*Recommended for: production environments* +Recommended for: production environments. -Run your HTTPS proxy in front of the Argo Server. You'll need to set-up your certificates and this out of scope of this -documentation. +Run your HTTPS proxy in front of the Argo Server. You'll need to set-up your certificates (this is out of scope of this +documentation). Start Argo Server with the `--secure` flag, e.g.: -``` +```bash argo server --secure ``` @@ -86,11 +84,11 @@ As before, it will start with a self-signed certificate that expires after 365 d Run the CLI with `--secure` (or `ARGO_SECURE=true`) only. 
-``` +```bash argo --secure list ``` -``` +```bash export ARGO_SECURE=true argo list ``` @@ -107,4 +105,3 @@ This must be one of these [int values](https://golang.org/pkg/crypto/tls/). | v1.1 | 770 | | v1.2 | 771 | | v1.3 | 772 | - diff --git a/docs/tolerating-pod-deletion.md b/docs/tolerating-pod-deletion.md index 628fdb2ad7a9..8024adec7883 100644 --- a/docs/tolerating-pod-deletion.md +++ b/docs/tolerating-pod-deletion.md @@ -5,8 +5,8 @@ In Kubernetes, pods are cattle and can be deleted at any time. Deletion could be manually via `kubectl delete pod`, during a node drain, or for other reasons. This can be very inconvenient, your workflow will error, but for reasons outside of your control. - -A [pod disruption budget](https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/default-pdb-support.yaml) can reduce the likelihood of this happening. But, it cannot entirely prevent it. + +A [pod disruption budget](https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/default-pdb-support.yaml) can reduce the likelihood of this happening. But, it cannot entirely prevent it. To retry pods that were deleted, set `retryStrategy.retryPolicy: OnError`. @@ -35,4 +35,6 @@ spec: - 30s ``` -Then execute `kubectl delete pod example`. You'll see that the errored node is automatically retried. \ No newline at end of file +Then execute `kubectl delete pod example`. You'll see that the errored node is automatically retried. + +💡 Read more on [architecting workflows for reliability](https://blog.argoproj.io/architecting-workflows-for-reliability-d33bd720c6cc). diff --git a/docs/training.md b/docs/training.md index 170277b13886..efe7e56c5405 100644 --- a/docs/training.md +++ b/docs/training.md @@ -1,18 +1,11 @@ # Training -## Hands-On - -We've created a Katacoda course featuring beginner and intermediate lessons. These allow to you try out Argo Workflows -in your web browser without needing to install anything on your computer. 
Each lesson starts up a Kubernetes cluster -that you can access via a web browser. - -The course will take around 2 hours to complete and is the fastest way to learn Argo Workflows. - -[![Katacoda Screeshot](assets/katacoda.png) Open the course](https://www.katacoda.com/argoproj/courses/argo-workflows/) - ## Videos -We also have a playlist of videos that dive into various topics. This includes contributing to Argo Workflows, not -covered in the hand-on. +We also have a YouTube playlist of videos that includes workshops you can follow along with: [![Videos Screenshot](assets/videos.png) Open the playlist](https://youtube.com/playlist?list=PLGHfqDpnXFXLHfeapfvtt9URtUF1geuBo) + +## Additional resources + +Visit the [awesome-argo GitHub repo](https://github.com/terrytangyuan/awesome-argo) for more educational resources. diff --git a/docs/upgrading.md b/docs/upgrading.md index 76ed9f7a02a5..a46988d51f67 100644 --- a/docs/upgrading.md +++ b/docs/upgrading.md @@ -1,14 +1,79 @@ - -# Upgrading +# Upgrading Guide Breaking changes typically (sometimes we don't realise they are breaking) have "!" in the commit message, as per the [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/#summary). +## Upgrading to v3.4 + +### Non-Emissary executors are removed. ([#7829](https://github.com/argoproj/argo-workflows/issues/7829)) + +Emissary executor is now the only supported executor. If you are using other executors, e.g. docker, k8sapi, pns, and kubelet, you need to +remove your `containerRuntimeExecutors` and `containerRuntimeExecutor` from your controller's configmap. If you have workflows that use different +executors with the label `workflows.argoproj.io/container-runtime-executor`, this is no longer supported and will not be effective. + +### chore!: Remove dataflow pipelines from codebase. 
(#9071) + +You are affected if you are using [dataflow pipelines](https://github.com/argoproj-labs/argo-dataflow) in the UI or via the `/pipelines` endpoint. +We no longer support dataflow pipelines and all relevant code has been removed. + +### feat!: Add entrypoint lookup. Fixes #8344 + +Affected if: + +* Using the Emissary executor. +* Used the `args` field for any entry in `images`. + +This PR automatically looks up the command and entrypoint. The implementation for config look-up was incorrect (it +allowed you to specify `args` but not `entrypoint`). `args` has been removed to correct the behaviour. + +If you are incorrectly configured, the workflow controller will error on start-up. + +#### Actions + +You don't need to configure images that use v2 manifests anymore. You can just remove them (e.g. argoproj/argosay:v2): + +```bash +% docker manifest inspect argoproj/argosay:v2 +... +"schemaVersion": 2, +... +``` + +For v1 manifests (e.g. docker/whalesay:latest): + +```bash +% docker image inspect -f '{{.Config.Entrypoint}} {{.Config.Cmd}}' docker/whalesay:latest +[] [/bin/bash] +``` + +```yaml +images: + docker/whalesay:latest: + cmd: [/bin/bash] +``` + +## feat: Fail on invalid config. (#8295) + +The workflow controller will error on start-up if incorrectly configured, rather than silently ignoring +mis-configuration. + +```text +Failed to register watch for controller config map: error unmarshaling JSON: while decoding JSON: json: unknown field \"args\" +``` + +## feat: add indexes for improve archived workflow performance. (#8860) + +This PR adds indexes to archived workflow tables. This change may cause a long time to upgrade if the user has a large table. + +## feat: enhance artifact visualization (#8655) + +For AWS users using S3: visualizing artifacts in the UI and downloading them now requires an additional "Action" to be configured in your S3 bucket policy: "ListBucket". 
+ ## Upgrading to v3.3 ### [662a7295b](https://github.com/argoproj/argo-workflows/commit/662a7295b) feat: Replace `patch pod` with `create workflowtaskresult`. Fixes #3961 (#8000) -The PR changes the permissions that can be used by a workflow to remove the `pod patch` permission. +The PR changes the permissions that can be used by a workflow to remove the `pod patch` permission. See [workflow RBAC](workflow-rbac.md) and [#8013](https://github.com/argoproj/argo-workflows/issues/3961). @@ -38,7 +103,7 @@ See [#8013](https://github.com/argoproj/argo-workflows/issues/8013). This PR removes the following configmap items - -- executorImage (use executor.image in configmap instead) +* executorImage (use executor.image in configmap instead) e.g. Workflow controller configmap similar to the following one given below won't be valid anymore: @@ -67,7 +132,7 @@ This PR removes the following configmap items - ... ``` -- executorImagePullPolicy (use executor.imagePullPolicy in configmap instead) +* executorImagePullPolicy (use executor.imagePullPolicy in configmap instead) e.g. Workflow controller configmap similar to the following one given below won't be valid anymore: @@ -88,7 +153,7 @@ This PR removes the following configmap items - ... ``` -- executorResources (use executor.resources in configmap instead) +* executorResources (use executor.resources in configmap instead) e.g. Workflow controller configmap similar to the following one given below won't be valid anymore: @@ -123,7 +188,7 @@ This PR removes the following configmap items - ### [fce82d572](https://github.com/argoproj/argo-workflows/commit/fce82d5727b89cfe49e8e3568fff40725bd43734) feat: Remove pod workers (#7837) -This PR removes pod workers from the code, the pod informer directly writes into the workflow queue. As a result the `--pod-workers` flag has been removed. +This PR removes pod workers from the code, the pod informer directly writes into the workflow queue. 
As a result the `--pod-workers` flag has been removed. ### [93c11a24ff](https://github.com/argoproj/argo-workflows/commit/93c11a24ff06049c2197149acd787f702e5c1f9b) feat: Add TLS to Metrics and Telemetry servers (#7041) @@ -146,7 +211,7 @@ HTTPArtifact without a scheme will now defaults to https instead of http user need to explicitly include a http prefix if they want to retrieve HTTPArtifact through http -### chore!: Remove the hidden flag `--verify` from `argo submit`. +### chore!: Remove the hidden flag `--verify` from `argo submit` The hidden flag `--verify` has been removed from `argo submit`. This is a internal testing flag we don't need anymore. @@ -154,7 +219,7 @@ The hidden flag `--verify` has been removed from `argo submit`. This is a intern ### [e5b131a33](https://github.com/argoproj/argo-workflows/commit/e5b131a33) feat: Add template node to pod name. Fixes #1319 (#6712) -This add the template name to the pod name, to make it easier to understand which pod ran which step. This behaviour can be reverted by setting `POD_NAMES=v1` on the workflow controller. +This add the template name to the pod name, to make it easier to understand which pod ran which step. This behaviour can be reverted by setting `POD_NAMES=v1` on the workflow controller. ### [be63efe89](https://github.com/argoproj/argo-workflows/commit/be63efe89) feat(executor)!: Change `argoexec` base image to alpine. Closes #5720 (#6006) @@ -204,7 +269,7 @@ always play nicely with the `when` condition syntax (Goevaluate). 
This can be resolved using a single quote in your when expression: -``` +```yaml when: "'{{inputs.parameters.should-print}}' != '2021-01-01'" ``` diff --git a/docs/use-cases/infrastructure-automation.md b/docs/use-cases/infrastructure-automation.md index 080cd1cc0a5d..200019395d5a 100644 --- a/docs/use-cases/infrastructure-automation.md +++ b/docs/use-cases/infrastructure-automation.md @@ -7,7 +7,7 @@ ## Videos -* [Infrastructure Automation with Argo at InsideBoard - Alexandre Le Mao (Head of infrastructure / Lead DevOps, Insideboard)](https://www.youtube.com/watch?v=BochC4GKxbo&list=PLGHfqDpnXFXK4E8XzasScagiJk-8BPgva&index=2&utm_source=argo-docs) +* [Infrastructure Automation with Argo at InsideBoard - Alexandre Le Mao (Head of infrastructure / Lead DevOps, InsideBoard)](https://www.youtube.com/watch?v=BochC4GKxbo&list=PLGHfqDpnXFXK4E8XzasScagiJk-8BPgva&index=2&utm_source=argo-docs) * [Argo and KNative - David Breitgand (IBM) - showing 5G infra automation use case](https://youtu.be/dxX_Xnp2sX4?t=210&utm_source=argo-docs) * [How New Relic Uses Argo Workflows - Fischer Jemison, Jared Welch (New Relic)](https://youtu.be/dxX_Xnp2sX4?t=1890&utm_source=argo-docs) * [Building Kubernetes using Kubernetes - Tomas Valasek (SAP Concur)](https://youtu.be/TLTxv2F5WCQ?t=1742&utm_source=argo-docs) diff --git a/docs/use-cases/machine-learning.md b/docs/use-cases/machine-learning.md index 709204bbf61e..2af8c74fa97c 100644 --- a/docs/use-cases/machine-learning.md +++ b/docs/use-cases/machine-learning.md @@ -14,8 +14,8 @@ * [Building Medical Grade AI with Argo Workflows](https://youtu.be/4VPSktuM5Ow) * [CI/CD for Machine Learning at MLB using Argo Workflows - Eric Meadows](https://youtu.be/fccWoYlwZKc?t=184&utm_source=argo-docs) * [Dynamic, Event-Driven Machine Learning Pipelines with Argo Workflows](https://youtu.be/ei4r0a7eAV0) -* [Machine Learning as Code: GitOps for ML with Kubeflow and 
ArgoCD](https://www.youtube.com/watch?v=VXrGp5er1ZE&t=0s&index=135&list=PLj6h78yzYM2PZf9eA7bhWnIh_mK1vyOfU) +* [Machine Learning as Code: GitOps for ML with Kubeflow and Argo CD](https://www.youtube.com/watch?v=VXrGp5er1ZE&t=0s&index=135&list=PLj6h78yzYM2PZf9eA7bhWnIh_mK1vyOfU) * [Machine Learning with Argo and Ploomber](https://www.youtube.com/watch?v=FnpXyg-5W_c&list=PLGHfqDpnXFXK4E8XzasScagiJk-8BPgva&utm_source=argo-docs) * [Making Complex R Forecast Applications Into Production Using Argo Workflows](https://www.youtube.com/watch?v=fPjztsUXHcg) -* [MLOps at Tripadvisor: ML Models CI/CD Automation with Argo - Ang Gao (Principal Software Engineer, TripAdvisor)](https://www.youtube.com/watch?v=BochC4GKxbo&list=PLGHfqDpnXFXK4E8XzasScagiJk-8BPgva&index=2&utm_source=argo-docs) +* [MLOps at TripAdvisor: ML Models CI/CD Automation with Argo - Ang Gao (Principal Software Engineer, TripAdvisor)](https://www.youtube.com/watch?v=BochC4GKxbo&list=PLGHfqDpnXFXK4E8XzasScagiJk-8BPgva&index=2&utm_source=argo-docs) * [Towards Cloud-Native Distributed Machine Learning Pipelines at Scale](https://github.com/terrytangyuan/public-talks/tree/main/talks/towards-cloud-native-distributed-machine-learning-pipelines-at-scale-pydata-global-2021) diff --git a/docs/use-cases/stream-processing.md b/docs/use-cases/stream-processing.md index fa19fda79d09..ea13c1bb12ca 100644 --- a/docs/use-cases/stream-processing.md +++ b/docs/use-cases/stream-processing.md @@ -1,3 +1,3 @@ # Stream Processing -Head to the [ArgoLabs Dataflow](https://github.com/argoproj-labs/argo-dataflow) docs. \ No newline at end of file +Head to the [ArgoLabs Dataflow](https://github.com/argoproj-labs/argo-dataflow) docs. 
diff --git a/docs/use-cases/webhdfs.md b/docs/use-cases/webhdfs.md new file mode 100644 index 000000000000..8acbfc80fc4a --- /dev/null +++ b/docs/use-cases/webhdfs.md @@ -0,0 +1,45 @@ +# Using webHDFS protocol via HTTP artifacts + +webHDFS is a protocol that allows access to Hadoop or a similar data storage system via a unified REST API (see the [WebHDFS documentation](https://hadoop.apache.org/docs/r3.3.3/hadoop-project-dist/hadoop-hdfs/WebHDFS.html)). + +## Input Artifacts + +In order to use the webHDFS protocol we will make use of HTTP artifacts, where the URL will be set to the webHDFS endpoint including the file path and all its query parameters. Suppose our webHDFS endpoint is available under `https://mywebhdfsprovider.com/webhdfs/v1/` and we have a file `my-art.txt` located in a `data` folder, which we want to use as an input artifact. To construct the HTTP URL we need to append the file path to the base webHDFS endpoint and set the [OPEN operation](https://hadoop.apache.org/docs/r3.3.3/hadoop-project-dist/hadoop-hdfs/WebHDFS.html#Open_and_Read_a_File) in the HTTP URL parameter. This results in the following URL: `https://mywebhdfsprovider.com/webhdfs/v1/data/my-art.txt?op=OPEN`. This is all you need for webHDFS input artifacts to work! Now, when run, the workflow will download the specified webHDFS artifact into the given `path`. There are some additional fields that can be set for HTTP artifacts (e.g. HTTP headers), which you can find in the [full webHDFS example](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml). + +```yaml +spec: + [...]
+ inputs: + artifacts: + - name: my-art + path: /my-artifact + http: + url: "https://mywebhdfsprovider.com/webhdfs/v1/file.txt?op=OPEN" +``` + +## Output Artifacts + +In order to declare a webHDFS output artifact, little change is necessary: We only need to change the webHDFS operation to the [CREATE operation](https://hadoop.apache.org/docs/r3.3.3/hadoop-project-dist/hadoop-hdfs/WebHDFS.html#Create_and_Write_to_a_File) and set the file path to where we want the output artifact to be stored. In this example we want to store the artifact under `outputs/newfile.txt`. We also supply the optional overwrite parameter `overwrite=true` to allow overwriting existing files in the webHDFS provider's data storage. If the `overwrite` flag is unset, the default behavior is used, which depends on the particular webHDFS provider. The example below shows the output artifact: + +```yaml +spec: + [...] + outputs: + artifacts: + - name: my-art + path: /my-artifact + http: + url: "https://mywebhdfsprovider.com/webhdfs/v1/outputs/newfile.txt?op=CREATE&overwrite=true" +``` + +## Authentication + +The above example showed a minimal use case without any authentication. However, in a real-world scenario, you may want to provide some authentication option. Currently, Argo Workflows' HTTP artifacts support the following authentication mechanisms: + +- HTTP Basic Auth +- OAuth2 +- Client Certificates + +Hence, the authentication mechanisms that can be used for webHDFS artifacts are limited to those supported by HTTP artifacts. Examples for the latter two authentication mechanisms can be found in the [webHDFS example file](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml). + +**Limitation**: Apache Hadoop itself only supports authentication via Kerberos SPNEGO and Hadoop delegation token (see the [WebHDFS documentation](https://hadoop.apache.org/docs/r3.3.3/hadoop-project-dist/hadoop-hdfs/WebHDFS.html#Authentication)).
While the former one is currently not supported for HTTP artifacts a usage of delegation tokens can be realized by supplying the authentication token in the HTTP URL of the respective input or output artifact. diff --git a/docs/variables.md b/docs/variables.md index ac6a92cdd787..da5892488aa8 100644 --- a/docs/variables.md +++ b/docs/variables.md @@ -25,10 +25,10 @@ spec: container: image: docker/whalesay command: [ cowsay ] - args: [ "{{inputs.parameters.message}}" ] + args: [ "{{inputs.parameters.message}}" ] ``` -The following variables are made available to reference various metadata of a workflow: +The following variables are made available to reference various meta-data of a workflow: ## Template Tag Kinds @@ -41,7 +41,7 @@ There are two kinds of template tag: The tag is substituted with the variable that has a name the same as the tag. -Simple tags **may** have whitespace between the brackets and variable. +Simple tags **may** have white-space between the brackets and variable as seen below. However, there is a known issue where variables may fail to interpolate with white-space, so it is recommended to avoid using white-space until this issue is resolved. [Please report](https://github.com/argoproj/argo-workflows/issues/8960) unexpected behavior with reproducible examples. ```yaml args: [ "{{ inputs.parameters.message }}" ] @@ -62,19 +62,19 @@ indexing into the parameter or step map, e.g. 
`inputs.parameters['my-param']` or Plain list: -``` +```text [1, 2] ``` Filter a list: -``` +```text filter([1, 2], { # > 1}) ``` Map a list: -``` +```text map([1, 2], { # * 2 }) ``` @@ -82,31 +82,31 @@ We provide some core functions: Cast to int: -``` +```text asInt(inputs.parameters['my-int-param']) ``` Cast to float: -``` +```text asFloat(inputs.parameters['my-float-param']) ``` Cast to string: -``` +```text string(1) ``` Convert to a JSON string (needed for `withParam`): -``` +```text toJson([1, 2]) ``` Extract data from JSON: -``` +```text jsonpath(inputs.parameters.json, '$.some.path') ``` @@ -114,7 +114,7 @@ You can also use [Sprig functions](http://masterminds.github.io/sprig/): Trim a string: -``` +```text sprig.trim(inputs.parameters['my-string-param']) ``` @@ -135,30 +135,32 @@ returns `0`. Please review the Sprig documentation to understand which functions | Variable | Description| |----------|------------| +| `steps.name` | Name of the step | | `steps..id` | unique id of container step | | `steps..ip` | IP address of a previous daemon container step | | `steps..status` | Phase status of any previous step | | `steps..exitCode` | Exit code of any previous script or container step | -| `steps..startedAt` | Timestamp when the step started | -| `steps..finishedAt` | Timestamp when the step finished | +| `steps..startedAt` | Time-stamp when the step started | +| `steps..finishedAt` | Time-stamp when the step finished | | `steps..outputs.result` | Output result of any previous container or script step | -| `steps..outputs.parameters` | When the previous step uses 'withItems' or 'withParams', this contains a JSON array of the output parameter maps of each invocation | -| `steps..outputs.parameters.` | Output parameter of any previous step. 
When the previous step uses 'withItems' or 'withParams', this contains a JSON array of the output parameter values of each invocation | +| `steps..outputs.parameters` | When the previous step uses `withItems` or `withParams`, this contains a JSON array of the output parameter maps of each invocation | +| `steps..outputs.parameters.` | Output parameter of any previous step. When the previous step uses `withItems` or `withParams`, this contains a JSON array of the output parameter values of each invocation | | `steps..outputs.artifacts.` | Output artifact of any previous step | ### DAG Templates | Variable | Description| |----------|------------| +| `tasks.name` | Name of the task | | `tasks..id` | unique id of container task | | `tasks..ip` | IP address of a previous daemon container task | | `tasks..status` | Phase status of any previous task | | `tasks..exitCode` | Exit code of any previous script or container task | -| `tasks..startedAt` | Timestamp when the task started | -| `tasks..finishedAt` | Timestamp when the task finished | +| `tasks..startedAt` | Time-stamp when the task started | +| `tasks..finishedAt` | Time-stamp when the task finished | | `tasks..outputs.result` | Output result of any previous container or script task | -| `tasks..outputs.parameters` | When the previous task uses 'withItems' or 'withParams', this contains a JSON array of the output parameter maps of each invocation | -| `tasks..outputs.parameters.` | Output parameter of any previous task. When the previous task uses 'withItems' or 'withParams', this contains a JSON array of the output parameter values of each invocation | +| `tasks..outputs.parameters` | When the previous task uses `withItems` or `withParams`, this contains a JSON array of the output parameter maps of each invocation | +| `tasks..outputs.parameters.` | Output parameter of any previous task. 
When the previous task uses `withItems` or `withParams`, this contains a JSON array of the output parameter values of each invocation | | `tasks..outputs.artifacts.` | Output artifact of any previous task | ### HTTP Templates @@ -177,7 +179,7 @@ Only available for `successCondition` | `response.body` | Response body (`string`) | | `response.headers` | Response headers (`map[string][]string`) | -### RetryStrategy +### `RetryStrategy` When using the `expression` field within `retryStrategy`, special variables are available. @@ -194,12 +196,12 @@ Note: These variables evaluate to a string type. If using advanced expressions, | Variable | Description| |----------|------------| | `pod.name` | Pod name of the container/script | -| `retries` | The retry number of the container/script if retryStrategy is specified | +| `retries` | The retry number of the container/script if `retryStrategy` is specified | | `inputs.artifacts..path` | Local path of the input artifact | | `outputs.artifacts..path` | Local path of the output artifact | | `outputs.parameters..path` | Local path of the output parameter | -### Loops (withItems / withParam) +### Loops (`withItems` / `withParam`) | Variable | Description| |----------|------------| @@ -221,9 +223,9 @@ step. | `outputs.result` | Output result of the metric-emitting template | | `resourcesDuration.{cpu,memory}` | Resources duration **in seconds**. Must be one of `resourcesDuration.cpu` or `resourcesDuration.memory`, if available. For more info, see the [Resource Duration](resource-duration.md) doc.| -### Realtime Metrics +### Real-Time Metrics -Some variables can be emitted in realtime (as opposed to just when the step/task completes). To emit these variables in +Some variables can be emitted in real-time (as opposed to just when the step/task completes). To emit these variables in real time, set `realtime: true` under `gauge` (note: only Gauge metrics allow for real time variable emission). 
Metrics currently available for real time emission: @@ -249,12 +251,12 @@ For `Template`-level metrics: | `workflow.outputs.artifacts.` | Global artifact in the workflow | | `workflow.annotations.` | Workflow annotations | | `workflow.labels.` | Workflow labels | -| `workflow.creationTimestamp` | Workflow creation timestamp formatted in RFC 3339 (e.g. `2018-08-23T05:42:49Z`) | -| `workflow.creationTimestamp.` | Creation timestamp formatted with a [strftime](http://strftime.org) format character. | -| `workflow.creationTimestamp.RFC3339` | Creation timestamp formatted with in RFC 3339. | +| `workflow.creationTimestamp` | Workflow creation time-stamp formatted in RFC 3339 (e.g. `2018-08-23T05:42:49Z`) | +| `workflow.creationTimestamp.` | Creation time-stamp formatted with a [`strftime`](http://strftime.org) format character. | +| `workflow.creationTimestamp.RFC3339` | Creation time-stamp formatted with in RFC 3339. | | `workflow.priority` | Workflow priority | | `workflow.duration` | Workflow duration estimate, may differ from actual duration by a couple of seconds | -| `workflow.scheduledTime` | Scheduled runtime formatted in RFC 3339 (only available for CronWorkflows) | +| `workflow.scheduledTime` | Scheduled runtime formatted in RFC 3339 (only available for `CronWorkflow`) | ### Exit Handler diff --git a/docs/versioning.md b/docs/versioning.md deleted file mode 100644 index 1689b36d69e6..000000000000 --- a/docs/versioning.md +++ /dev/null @@ -1,9 +0,0 @@ -# Versioning - -Argo Workflows does not use Semantic Versioning, even though we have not introduced any breaking changes since v2. - -Breaking changes will be communicated in the release notes. 
- -See: - -* [Public API](public-api.md) diff --git a/docs/walk-through/argo-cli.md b/docs/walk-through/argo-cli.md new file mode 100644 index 000000000000..196e5871b18e --- /dev/null +++ b/docs/walk-through/argo-cli.md @@ -0,0 +1,22 @@ +# Argo CLI + +In case you want to follow along with this walk-through, here's a quick overview of the most useful argo command line interface (CLI) commands. + +```bash +argo submit hello-world.yaml # submit a workflow spec to Kubernetes +argo list # list current workflows +argo get hello-world-xxx # get info about a specific workflow +argo logs hello-world-xxx # print the logs from a workflow +argo delete hello-world-xxx # delete workflow +``` + +You can also run workflow specs directly using `kubectl` but the Argo CLI provides syntax checking, nicer output, and requires less typing. + +```bash +kubectl create -f hello-world.yaml +kubectl get wf +kubectl get wf hello-world-xxx +kubectl get po --selector=workflows.argoproj.io/workflow=hello-world-xxx --show-all # similar to argo +kubectl logs hello-world-xxx-yyy -c main +kubectl delete wf hello-world-xxx +``` diff --git a/docs/walk-through/artifacts.md b/docs/walk-through/artifacts.md new file mode 100644 index 000000000000..bf8ee1ebfefc --- /dev/null +++ b/docs/walk-through/artifacts.md @@ -0,0 +1,206 @@ +# Artifacts + +**Note:** +You will need to configure an artifact repository to run this example. +[Configuring an artifact repository here](https://argoproj.github.io/argo-workflows/configure-artifact-repository/). + +When running workflows, it is very common to have steps that generate or consume artifacts. Often, the output artifacts of one step may be used as input artifacts to a subsequent step. + +The below workflow spec consists of two steps that run in sequence. The first step named `generate-artifact` will generate an artifact using the `whalesay` template that will be consumed by the second step named `print-message` that then consumes the generated artifact. 
+ +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: artifact-passing- +spec: + entrypoint: artifact-example + templates: + - name: artifact-example + steps: + - - name: generate-artifact + template: whalesay + - - name: consume-artifact + template: print-message + arguments: + artifacts: + # bind message to the hello-art artifact + # generated by the generate-artifact step + - name: message + from: "{{steps.generate-artifact.outputs.artifacts.hello-art}}" + + - name: whalesay + container: + image: docker/whalesay:latest + command: [sh, -c] + args: ["cowsay hello world | tee /tmp/hello_world.txt"] + outputs: + artifacts: + # generate hello-art artifact from /tmp/hello_world.txt + # artifacts can be directories as well as files + - name: hello-art + path: /tmp/hello_world.txt + + - name: print-message + inputs: + artifacts: + # unpack the message input artifact + # and put it at /tmp/message + - name: message + path: /tmp/message + container: + image: alpine:latest + command: [sh, -c] + args: ["cat /tmp/message"] +``` + +The `whalesay` template uses the `cowsay` command to generate a file named `/tmp/hello-world.txt`. It then `outputs` this file as an artifact named `hello-art`. In general, the artifact's `path` may be a directory rather than just a file. The `print-message` template takes an input artifact named `message`, unpacks it at the `path` named `/tmp/message` and then prints the contents of `/tmp/message` using the `cat` command. +The `artifact-example` template passes the `hello-art` artifact generated as an output of the `generate-artifact` step as the `message` input artifact to the `print-message` step. DAG templates use the tasks prefix to refer to another task, for example `{{tasks.generate-artifact.outputs.artifacts.hello-art}}`. + +Artifacts are packaged as Tarballs and gzipped by default. You may customize this behavior by specifying an archive strategy, using the `archive` field. For example: + +```yaml +<... 
snipped ...> + outputs: + artifacts: + # default behavior - tar+gzip default compression. + - name: hello-art-1 + path: /tmp/hello_world.txt + + # disable archiving entirely - upload the file / directory as is. + # this is useful when the container layout matches the desired target repository layout. + - name: hello-art-2 + path: /tmp/hello_world.txt + archive: + none: {} + + # customize the compression behavior (disabling it here). + # this is useful for files with varying compression benefits, + # e.g. disabling compression for a cached build workspace and large binaries, + # or increasing compression for "perfect" textual data - like a json/xml export of a large database. + - name: hello-art-3 + path: /tmp/hello_world.txt + archive: + tar: + # no compression (also accepts the standard gzip 1 to 9 values) + compressionLevel: 0 +<... snipped ...> +``` + +## Artifact Garbage Collection + +As of version 3.4 you can configure your Workflow to automatically delete Artifacts that you don't need (presuming you're using S3 - other storage engines still need to be implemented). + +Artifacts can be deleted `OnWorkflowCompletion` or `OnWorkflowDeletion`. 
You can specify your Garbage Collection strategy on both the Workflow level and the Artifact level, so for example, you may have temporary artifacts that can be deleted right away but a final output that should be persisted: + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: artifact-gc- +spec: + entrypoint: main + artifactGC: + strategy: OnWorkflowDeletion # default Strategy set here applies to all Artifacts by default + templates: + - name: main + container: + image: argoproj/argosay:v2 + command: + - sh + - -c + args: + - | + echo "can throw this away" > /tmp/temporary-artifact.txt + echo "keep this" > /tmp/keep-this.txt + outputs: + artifacts: + - name: temporary-artifact + path: /tmp/temporary-artifact.txt + s3: + key: temporary-artifact.txt + - name: keep-this + path: /tmp/keep-this.txt + s3: + key: keep-this.txt + artifactGC: + strategy: Never # optional override for an Artifact +``` + +### Artifact Naming + +Consider parameterizing your S3 keys by {{workflow.uid}}, etc (as shown in the example above) if there's a possibility that you could have concurrent Workflows of the same spec. This would be to avoid a scenario in which the artifact from one Workflow is being deleted while the same S3 key is being generated for a different Workflow. + +### Service Accounts and Annotations + +Does your S3 bucket require you to run with a special Service Account or IAM Role Annotation? You can either use the same ones you use for creating artifacts or generate new ones that are specific for deletion permission. 
Generally users will probably just have a single Service Account or IAM Role to apply to all artifacts for the Workflow, but you can also customize on the artifact level if you need that: + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: artifact-gc- +spec: + entrypoint: main + artifactGC: + strategy: OnWorkflowDeletion + ############################################################################################## + # Workflow Level Service Account and Metadata + ############################################################################################## + serviceAccountName: my-sa + podMetadata: + annotations: + eks.amazonaws.com/role-arn: arn:aws:iam::111122223333:role/my-iam-role + templates: + - name: main + container: + image: argoproj/argosay:v2 + command: + - sh + - -c + args: + - | + echo "can throw this away" > /tmp/temporary-artifact.txt + echo "keep this" > /tmp/keep-this.txt + outputs: + artifacts: + - name: temporary-artifact + path: /tmp/temporary-artifact.txt + s3: + key: temporary-artifact-{{workflow.uid}}.txt + artifactGC: + #################################################################################### + # Optional override capability + #################################################################################### + serviceAccountName: artifact-specific-sa + podMetadata: + annotations: + eks.amazonaws.com/role-arn: arn:aws:iam::111122223333:role/artifact-specific-iam-role + - name: keep-this + path: /tmp/keep-this.txt + s3: + key: keep-this-{{workflow.uid}}.txt + artifactGC: + strategy: Never +``` + +If you do supply your own Service Account you will need to create a RoleBinding that binds it with the new `artifactgc` Role. + +### What happens if Garbage Collection fails? 
+ +If deletion of the artifact fails for some reason (other than the Artifact already having been deleted, which is not considered a failure), the Workflow's Status will be marked with a new Condition to indicate "Artifact GC Failure", a Kubernetes Event will be issued, and the Argo Server UI will also indicate the failure. In that case, if the user needs to delete the Workflow and its child CRD objects, the user will need to patch the Workflow to remove the finalizer preventing the deletion: + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow + finalizers: + - workflows.argoproj.io/artifact-gc +``` + +The finalizer can be deleted by doing: + +```sh +kubectl patch workflow my-wf \ + --type json \ + --patch='[ { "op": "remove", "path": "/metadata/finalizers" } ]' +``` diff --git a/docs/walk-through/conditionals.md b/docs/walk-through/conditionals.md new file mode 100644 index 000000000000..b07c04ec1437 --- /dev/null +++ b/docs/walk-through/conditionals.md @@ -0,0 +1,78 @@ +# Conditionals + +We also support conditional execution. The syntax is implemented by [`govaluate`](https://github.com/Knetic/govaluate) which offers support for complex syntax.
See in the example: + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: coinflip- +spec: + entrypoint: coinflip + templates: + - name: coinflip + steps: + # flip a coin + - - name: flip-coin + template: flip-coin + # evaluate the result in parallel + - - name: heads + template: heads # call heads template if "heads" + when: "{{steps.flip-coin.outputs.result}} == heads" + - name: tails + template: tails # call tails template if "tails" + when: "{{steps.flip-coin.outputs.result}} == tails" + - - name: flip-again + template: flip-coin + - - name: complex-condition + template: heads-tails-or-twice-tails + # call heads template if first flip was "heads" and second was "tails" OR both were "tails" + when: >- + ( {{steps.flip-coin.outputs.result}} == heads && + {{steps.flip-again.outputs.result}} == tails + ) || + ( {{steps.flip-coin.outputs.result}} == tails && + {{steps.flip-again.outputs.result}} == tails ) + - name: heads-regex + template: heads # call heads template if ~ "hea" + when: "{{steps.flip-again.outputs.result}} =~ hea" + - name: tails-regex + template: tails # call heads template if ~ "tai" + when: "{{steps.flip-again.outputs.result}} =~ tai" + + # Return heads or tails based on a random number + - name: flip-coin + script: + image: python:alpine3.6 + command: [python] + source: | + import random + result = "heads" if random.randint(0,1) == 0 else "tails" + print(result) + + - name: heads + container: + image: alpine:3.6 + command: [sh, -c] + args: ["echo \"it was heads\""] + + - name: tails + container: + image: alpine:3.6 + command: [sh, -c] + args: ["echo \"it was tails\""] + + - name: heads-tails-or-twice-tails + container: + image: alpine:3.6 + command: [sh, -c] + args: ["echo \"it was heads the first flip and tails the second. Or it was two times tails.\""] +``` + +!!! note +If the parameter value contains quotes, it may invalidate the govaluate expression. 
To handle parameters with +quotes, embed an [expr](https://github.com/antonmedv/expr) expression in the conditional. For example: + +```yaml +when: "{{=inputs.parameters['may-contain-quotes'] == 'example'}}" +``` diff --git a/docs/walk-through/continuous-integration-examples.md b/docs/walk-through/continuous-integration-examples.md new file mode 100644 index 000000000000..a5ba5c2bba4a --- /dev/null +++ b/docs/walk-through/continuous-integration-examples.md @@ -0,0 +1,15 @@ +# Continuous Integration Examples + +Continuous integration is a popular application for workflows. + +Some quick examples of CI workflows: + +- +- + +And a CI `WorkflowTemplate` example: + +- + +A more detailed example is , which allows you to +create a local CI workflow for the purposes of learning. diff --git a/docs/walk-through/custom-template-variable-reference.md b/docs/walk-through/custom-template-variable-reference.md new file mode 100644 index 000000000000..0c4d0b2b1337 --- /dev/null +++ b/docs/walk-through/custom-template-variable-reference.md @@ -0,0 +1,40 @@ +# Custom Template Variable Reference + +In this example, we can see how we can use the other template language variable reference (E.g: Jinja) in Argo workflow template. 
Argo will validate and resolve only variables that start with an Argo-allowed prefix
+ +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: daemon-step- +spec: + entrypoint: daemon-example + templates: + - name: daemon-example + steps: + - - name: influx + template: influxdb # start an influxdb as a daemon (see the influxdb template spec below) + + - - name: init-database # initialize influxdb + template: influxdb-client + arguments: + parameters: + - name: cmd + value: curl -XPOST 'http://{{steps.influx.ip}}:8086/query' --data-urlencode "q=CREATE DATABASE mydb" + + - - name: producer-1 # add entries to influxdb + template: influxdb-client + arguments: + parameters: + - name: cmd + value: for i in $(seq 1 20); do curl -XPOST 'http://{{steps.influx.ip}}:8086/write?db=mydb' -d "cpu,host=server01,region=uswest load=$i" ; sleep .5 ; done + - name: producer-2 # add entries to influxdb + template: influxdb-client + arguments: + parameters: + - name: cmd + value: for i in $(seq 1 20); do curl -XPOST 'http://{{steps.influx.ip}}:8086/write?db=mydb' -d "cpu,host=server02,region=uswest load=$((RANDOM % 100))" ; sleep .5 ; done + - name: producer-3 # add entries to influxdb + template: influxdb-client + arguments: + parameters: + - name: cmd + value: curl -XPOST 'http://{{steps.influx.ip}}:8086/write?db=mydb' -d 'cpu,host=server03,region=useast load=15.4' + + - - name: consumer # consume intries from influxdb + template: influxdb-client + arguments: + parameters: + - name: cmd + value: curl --silent -G http://{{steps.influx.ip}}:8086/query?pretty=true --data-urlencode "db=mydb" --data-urlencode "q=SELECT * FROM cpu" + + - name: influxdb + daemon: true # start influxdb as a daemon + retryStrategy: + limit: 10 # retry container if it fails + container: + image: influxdb:1.2 + command: + - influxd + readinessProbe: # wait for readinessProbe to succeed + httpGet: + path: /ping + port: 8086 + + - name: influxdb-client + inputs: + parameters: + - name: cmd + container: + image: appropriate/curl:latest + command: ["/bin/sh", "-c"] + 
args: ["{{inputs.parameters.cmd}}"] + resources: + requests: + memory: 32Mi + cpu: 100m +``` + +Step templates use the `steps` prefix to refer to another step: for example `{{steps.influx.ip}}`. In DAG templates, the `tasks` prefix is used instead: for example `{{tasks.influx.ip}}`. diff --git a/docs/walk-through/dag.md b/docs/walk-through/dag.md new file mode 100644 index 000000000000..79f26aab4e60 --- /dev/null +++ b/docs/walk-through/dag.md @@ -0,0 +1,49 @@ +# DAG + +As an alternative to specifying sequences of steps, you can define the workflow as a directed-acyclic graph (DAG) by specifying the dependencies of each task. This can be simpler to maintain for complex workflows and allows for maximum parallelism when running tasks. + +In the following workflow, step `A` runs first, as it has no dependencies. Once `A` has finished, steps `B` and `C` run in parallel. Finally, once `B` and `C` have completed, step `D` can run. + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: dag-diamond- +spec: + entrypoint: diamond + templates: + - name: echo + inputs: + parameters: + - name: message + container: + image: alpine:3.7 + command: [echo, "{{inputs.parameters.message}}"] + - name: diamond + dag: + tasks: + - name: A + template: echo + arguments: + parameters: [{name: message, value: A}] + - name: B + dependencies: [A] + template: echo + arguments: + parameters: [{name: message, value: B}] + - name: C + dependencies: [A] + template: echo + arguments: + parameters: [{name: message, value: C}] + - name: D + dependencies: [B, C] + template: echo + arguments: + parameters: [{name: message, value: D}] +``` + +The dependency graph may have [multiple roots](https://github.com/argoproj/argo-workflows/tree/master/examples/dag-multiroot.yaml). The templates called from a DAG or steps template can themselves be DAG or steps templates. This can allow for complex workflows to be split into manageable pieces. 
+ +The DAG logic has a built-in `fail fast` feature to stop scheduling new steps, as soon as it detects that one of the DAG nodes is failed. Then it waits until all DAG nodes are completed before failing the DAG itself. +The [FailFast](https://github.com/argoproj/argo-workflows/tree/master/examples/dag-disable-failFast.yaml) flag default is `true`, if set to `false`, it will allow a DAG to run all branches of the DAG to completion (either success or failure), regardless of the failed outcomes of branches in the DAG. More info and example about this feature at [here](https://github.com/argoproj/argo-workflows/issues/1442). diff --git a/docs/walk-through/docker-in-docker-using-sidecars.md b/docs/walk-through/docker-in-docker-using-sidecars.md new file mode 100644 index 000000000000..5ac9e41987fa --- /dev/null +++ b/docs/walk-through/docker-in-docker-using-sidecars.md @@ -0,0 +1,35 @@ +# Docker-in-Docker Using Sidecars + +An application of sidecars is to implement Docker-in-Docker (DIND). DIND is useful when you want to run Docker commands from inside a container. For example, you may want to build and push a container image from inside your build container. In the following example, we use the `docker:dind` image to run a Docker daemon in a sidecar and give the main container access to the daemon. 
+ +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: sidecar-dind- +spec: + entrypoint: dind-sidecar-example + templates: + - name: dind-sidecar-example + container: + image: docker:19.03.13 + command: [sh, -c] + args: ["until docker ps; do sleep 3; done; docker run --rm debian:latest cat /etc/os-release"] + env: + - name: DOCKER_HOST # the docker daemon can be access on the standard port on localhost + value: 127.0.0.1 + sidecars: + - name: dind + image: docker:19.03.13-dind # Docker already provides an image for running a Docker daemon + command: [dockerd-entrypoint.sh] + env: + - name: DOCKER_TLS_CERTDIR # Docker TLS env config + value: "" + securityContext: + privileged: true # the Docker daemon can only run in a privileged container + # mirrorVolumeMounts will mount the same volumes specified in the main container + # to the sidecar (including artifacts), at the same mountPaths. This enables + # dind daemon to (partially) see the same filesystem as the main container in + # order to use features such as docker volume binding. + mirrorVolumeMounts: true +``` diff --git a/docs/walk-through/exit-handlers.md b/docs/walk-through/exit-handlers.md new file mode 100644 index 000000000000..51bf9e3ff720 --- /dev/null +++ b/docs/walk-through/exit-handlers.md @@ -0,0 +1,57 @@ +# Exit handlers + +An exit handler is a template that *always* executes, irrespective of success or failure, at the end of the workflow. + +Some common use cases of exit handlers are: + +- cleaning up after a workflow runs +- sending notifications of workflow status (e.g., e-mail/Slack) +- posting the pass/fail status to a web-hook result (e.g. 
GitHub build result) +- resubmitting or submitting another workflow + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: exit-handlers- +spec: + entrypoint: intentional-fail + onExit: exit-handler # invoke exit-handler template at end of the workflow + templates: + # primary workflow template + - name: intentional-fail + container: + image: alpine:latest + command: [sh, -c] + args: ["echo intentional failure; exit 1"] + + # Exit handler templates + # After the completion of the entrypoint template, the status of the + # workflow is made available in the global variable {{workflow.status}}. + # {{workflow.status}} will be one of: Succeeded, Failed, Error + - name: exit-handler + steps: + - - name: notify + template: send-email + - name: celebrate + template: celebrate + when: "{{workflow.status}} == Succeeded" + - name: cry + template: cry + when: "{{workflow.status}} != Succeeded" + - name: send-email + container: + image: alpine:latest + command: [sh, -c] + args: ["echo send e-mail: {{workflow.name}} {{workflow.status}} {{workflow.duration}}"] + - name: celebrate + container: + image: alpine:latest + command: [sh, -c] + args: ["echo hooray!"] + - name: cry + container: + image: alpine:latest + command: [sh, -c] + args: ["echo boohoo!"] +``` diff --git a/docs/walk-through/hardwired-artifacts.md b/docs/walk-through/hardwired-artifacts.md new file mode 100644 index 000000000000..f973494ac49c --- /dev/null +++ b/docs/walk-through/hardwired-artifacts.md @@ -0,0 +1,46 @@ +# Hardwired Artifacts + +With Argo, you can use any container image that you like to generate any kind of artifact. In practice, however, we find certain types of artifacts are very common, so there is built-in support for git, HTTP, GCS and S3 artifacts. 
+ +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: hardwired-artifact- +spec: + entrypoint: hardwired-artifact + templates: + - name: hardwired-artifact + inputs: + artifacts: + # Check out the master branch of the argo repo and place it at /src + # revision can be anything that git checkout accepts: branch, commit, tag, etc. + - name: argo-source + path: /src + git: + repo: https://github.com/argoproj/argo-workflows.git + revision: "master" + # Download kubectl 1.8.0 and place it at /bin/kubectl + - name: kubectl + path: /bin/kubectl + mode: 0755 + http: + url: https://storage.googleapis.com/kubernetes-release/release/v1.8.0/bin/linux/amd64/kubectl + # Copy an s3 compatible artifact repository bucket (such as AWS, GCS and MinIO) and place it at /s3 + - name: objects + path: /s3 + s3: + endpoint: storage.googleapis.com + bucket: my-bucket-name + key: path/in/bucket + accessKeySecret: + name: my-s3-credentials + key: accessKey + secretKeySecret: + name: my-s3-credentials + key: secretKey + container: + image: debian + command: [sh, -c] + args: ["ls -l /src /bin/kubectl /s3"] +``` diff --git a/docs/walk-through/hello-world.md b/docs/walk-through/hello-world.md new file mode 100644 index 000000000000..beef6f120e53 --- /dev/null +++ b/docs/walk-through/hello-world.md @@ -0,0 +1,56 @@ +# Hello World + +Let's start by creating a very simple workflow template to echo "hello world" using the `docker/whalesay` container +image from Docker Hub. + +You can run this directly from your shell with a simple docker command: + +```bash +$ docker run docker/whalesay cowsay "hello world" + _____________ +< hello world > + ------------- + \ + \ + \ + ## . + ## ## ## == + ## ## ## ## === + /""""""""""""""""___/ === + ~~~ {~~ ~~~~ ~~~ ~~~~ ~~ ~ / ===- ~~~ + \______ o __/ + \ \ __/ + \____\______/ + + +Hello from Docker! +This message shows that your installation appears to be working correctly. 
+``` + +Below, we run the same container on a Kubernetes cluster using an Argo workflow template. Be sure to read the comments +as they provide useful explanations. + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow # new type of k8s spec +metadata: + generateName: hello-world- # name of the workflow spec +spec: + entrypoint: whalesay # invoke the whalesay template + templates: + - name: whalesay # name of the template + container: + image: docker/whalesay + command: [ cowsay ] + args: [ "hello world" ] + resources: # limit the resources + limits: + memory: 32Mi + cpu: 100m +``` + +Argo adds a new `kind` of Kubernetes spec called a `Workflow`. The above spec contains a single `template` +called `whalesay` which runs the `docker/whalesay` container and invokes `cowsay "hello world"`. The `whalesay` template +is the `entrypoint` for the spec. The entrypoint specifies the initial template that should be invoked when the workflow +spec is executed by Kubernetes. Being able to specify the entrypoint is more useful when there is more than one template +defined in the Kubernetes workflow spec. :-) diff --git a/docs/walk-through/index.md b/docs/walk-through/index.md new file mode 100644 index 000000000000..f9d4f5ac4a59 --- /dev/null +++ b/docs/walk-through/index.md @@ -0,0 +1,16 @@ +# About + +Argo is implemented as a Kubernetes CRD (Custom Resource Definition). As a result, Argo workflows can be managed +using `kubectl` and natively integrates with other Kubernetes services such as volumes, secrets, and RBAC. The new Argo +software is light-weight and installs in under a minute, and provides complete workflow features including parameter +substitution, artifacts, fixtures, loops and recursive workflows. + +Dozens of examples are available in +the [examples directory](https://github.com/argoproj/argo-workflows/tree/master/examples) on GitHub. 
+ +For a complete description of the Argo workflow spec, please refer +to [the spec documentation](../fields.md#workflowspec). + +Progress through these examples in sequence to learn all the basics. + +Start with [Argo CLI](argo-cli.md). diff --git a/docs/walk-through/kubernetes-resources.md b/docs/walk-through/kubernetes-resources.md new file mode 100644 index 000000000000..0e65e1220068 --- /dev/null +++ b/docs/walk-through/kubernetes-resources.md @@ -0,0 +1,84 @@ +# Kubernetes Resources + +In many cases, you will want to manage Kubernetes resources from Argo workflows. The resource template allows you to create, delete or updated any type of Kubernetes resource. + +```yaml +# in a workflow. The resource template type accepts any k8s manifest +# (including CRDs) and can perform any `kubectl` action against it (e.g. create, +# apply, delete, patch). +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: k8s-jobs- +spec: + entrypoint: pi-tmpl + templates: + - name: pi-tmpl + resource: # indicates that this is a resource template + action: create # can be any kubectl action (e.g. create, delete, apply, patch) + # The successCondition and failureCondition are optional expressions. + # If failureCondition is true, the step is considered failed. + # If successCondition is true, the step is considered successful. + # They use kubernetes label selection syntax and can be applied against any field + # of the resource (not just labels). Multiple AND conditions can be represented by comma + # delimited expressions. 
+ # For more details: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/ + successCondition: status.succeeded > 0 + failureCondition: status.failed > 3 + manifest: | #put your kubernetes spec here + apiVersion: batch/v1 + kind: Job + metadata: + generateName: pi-job- + spec: + template: + metadata: + name: pi + spec: + containers: + - name: pi + image: perl + command: ["perl", "-Mbignum=bpi", "-wle", "print bpi(2000)"] + restartPolicy: Never + backoffLimit: 4 +``` + +**Note:** +Currently only a single resource can be managed by a resource template so either a `generateName` or `name` must be provided in the resource's meta-data. + +Resources created in this way are independent of the workflow. If you want the resource to be deleted when the workflow is deleted then you can use [Kubernetes garbage collection](https://kubernetes.io/docs/concepts/workloads/controllers/garbage-collection/) with the workflow resource as an owner reference ([example](https://github.com/argoproj/argo-workflows/tree/master/examples/k8s-owner-reference.yaml)). + +You can also collect data about the resource in output parameters (see more at [k8s-jobs.yaml](https://github.com/argoproj/argo-workflows/tree/master/examples/k8s-jobs.yaml)) + +**Note:** +When patching, the resource will accept another attribute, `mergeStrategy`, which can either be `strategic`, `merge`, or `json`. If this attribute is not supplied, it will default to `strategic`. Keep in mind that Custom Resources cannot be patched with `strategic`, so a different strategy must be chosen. 
For example, suppose you have the [`CronTab` CRD](https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/#create-a-customresourcedefinition) defined, and the following instance of a `CronTab`: + +```yaml +apiVersion: "stable.example.com/v1" +kind: CronTab +spec: + cronSpec: "* * * * */5" + image: my-awesome-cron-image +``` + +This `CronTab` can be modified using the following Argo Workflow: + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: k8s-patch- +spec: + entrypoint: cront-tmpl + templates: + - name: cront-tmpl + resource: + action: patch + mergeStrategy: merge # Must be one of [strategic merge json] + manifest: | + apiVersion: "stable.example.com/v1" + kind: CronTab + spec: + cronSpec: "* * * * */10" + image: my-awesome-cron-image +``` diff --git a/docs/walk-through/loops.md b/docs/walk-through/loops.md new file mode 100644 index 000000000000..6c04a702aead --- /dev/null +++ b/docs/walk-through/loops.md @@ -0,0 +1,161 @@ +# Loops + +When writing workflows, it is often very useful to be able to iterate over a set of inputs as shown in this example: + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: loops- +spec: + entrypoint: loop-example + templates: + - name: loop-example + steps: + - - name: print-message + template: whalesay + arguments: + parameters: + - name: message + value: "{{item}}" + withItems: # invoke whalesay once for each item in parallel + - hello world # item 1 + - goodbye world # item 2 + + - name: whalesay + inputs: + parameters: + - name: message + container: + image: docker/whalesay:latest + command: [cowsay] + args: ["{{inputs.parameters.message}}"] +``` + +We can also iterate over sets of items: + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: loops-maps- +spec: + entrypoint: loop-map-example + templates: + - name: loop-map-example + steps: + - - name: test-linux + template: 
cat-os-release + arguments: + parameters: + - name: image + value: "{{item.image}}" + - name: tag + value: "{{item.tag}}" + withItems: + - { image: 'debian', tag: '9.1' } #item set 1 + - { image: 'debian', tag: '8.9' } #item set 2 + - { image: 'alpine', tag: '3.6' } #item set 3 + - { image: 'ubuntu', tag: '17.10' } #item set 4 + + - name: cat-os-release + inputs: + parameters: + - name: image + - name: tag + container: + image: "{{inputs.parameters.image}}:{{inputs.parameters.tag}}" + command: [cat] + args: [/etc/os-release] +``` + +We can pass lists of items as parameters: + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: loops-param-arg- +spec: + entrypoint: loop-param-arg-example + arguments: + parameters: + - name: os-list # a list of items + value: | + [ + { "image": "debian", "tag": "9.1" }, + { "image": "debian", "tag": "8.9" }, + { "image": "alpine", "tag": "3.6" }, + { "image": "ubuntu", "tag": "17.10" } + ] + + templates: + - name: loop-param-arg-example + inputs: + parameters: + - name: os-list + steps: + - - name: test-linux + template: cat-os-release + arguments: + parameters: + - name: image + value: "{{item.image}}" + - name: tag + value: "{{item.tag}}" + withParam: "{{inputs.parameters.os-list}}" # parameter specifies the list to iterate over + + # This template is the same as in the previous example + - name: cat-os-release + inputs: + parameters: + - name: image + - name: tag + container: + image: "{{inputs.parameters.image}}:{{inputs.parameters.tag}}" + command: [cat] + args: [/etc/os-release] +``` + +We can even dynamically generate the list of items to iterate over! 
+ +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: loops-param-result- +spec: + entrypoint: loop-param-result-example + templates: + - name: loop-param-result-example + steps: + - - name: generate + template: gen-number-list + # Iterate over the list of numbers generated by the generate step above + - - name: sleep + template: sleep-n-sec + arguments: + parameters: + - name: seconds + value: "{{item}}" + withParam: "{{steps.generate.outputs.result}}" + + # Generate a list of numbers in JSON format + - name: gen-number-list + script: + image: python:alpine3.6 + command: [python] + source: | + import json + import sys + json.dump([i for i in range(20, 31)], sys.stdout) + + - name: sleep-n-sec + inputs: + parameters: + - name: seconds + container: + image: alpine:latest + command: [sh, -c] + args: ["echo sleeping for {{inputs.parameters.seconds}} seconds; sleep {{inputs.parameters.seconds}}; echo done"] +``` diff --git a/docs/walk-through/output-parameters.md b/docs/walk-through/output-parameters.md new file mode 100644 index 000000000000..3515dbd46d4c --- /dev/null +++ b/docs/walk-through/output-parameters.md @@ -0,0 +1,61 @@ +# Output Parameters + +Output parameters provide a general mechanism to use the result of a step as a parameter (and not just as an artifact). This allows you to use the result from any type of step, not just a `script`, for conditional tests, loops, and arguments. Output parameters work similarly to `script result` except that the value of the output parameter is set to the contents of a generated file rather than the contents of `stdout`. 
+ +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: output-parameter- +spec: + entrypoint: output-parameter + templates: + - name: output-parameter + steps: + - - name: generate-parameter + template: whalesay + - - name: consume-parameter + template: print-message + arguments: + parameters: + # Pass the hello-param output from the generate-parameter step as the message input to print-message + - name: message + value: "{{steps.generate-parameter.outputs.parameters.hello-param}}" + + - name: whalesay + container: + image: docker/whalesay:latest + command: [sh, -c] + args: ["echo -n hello world > /tmp/hello_world.txt"] # generate the content of hello_world.txt + outputs: + parameters: + - name: hello-param # name of output parameter + valueFrom: + path: /tmp/hello_world.txt # set the value of hello-param to the contents of this hello-world.txt + + - name: print-message + inputs: + parameters: + - name: message + container: + image: docker/whalesay:latest + command: [cowsay] + args: ["{{inputs.parameters.message}}"] +``` + +DAG templates use the tasks prefix to refer to another task, for example `{{tasks.generate-parameter.outputs.parameters.hello-param}}`. + +## `result` output parameter + +The `result` output parameter captures standard output. +It is accessible from the `outputs` map: `outputs.result`. +Only 256 kb of the standard output stream will be captured. + +### Scripts + +Outputs of a `script` are assigned to standard output and captured in the `result` parameter. More details [here](scripts-and-results.md). + +### Containers + +Container steps and tasks also have their standard output captured in the `result` parameter. +Given a `task`, called `log-int`, `result` would then be accessible as `{{ tasks.log-int.outputs.result }}`. If using [steps](steps.md), substitute `tasks` for `steps`: `{{ steps.log-int.outputs.result }}`. 
diff --git a/docs/walk-through/parameters.md b/docs/walk-through/parameters.md new file mode 100644 index 000000000000..0d835ca8adfd --- /dev/null +++ b/docs/walk-through/parameters.md @@ -0,0 +1,91 @@ +# Parameters + +Let's look at a slightly more complex workflow spec with parameters. + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: hello-world-parameters- +spec: + # invoke the whalesay template with + # "hello world" as the argument + # to the message parameter + entrypoint: whalesay + arguments: + parameters: + - name: message + value: hello world + + templates: + - name: whalesay + inputs: + parameters: + - name: message # parameter declaration + container: + # run cowsay with that message input parameter as args + image: docker/whalesay + command: [cowsay] + args: ["{{inputs.parameters.message}}"] +``` + +This time, the `whalesay` template takes an input parameter named `message` that is passed as the `args` to the `cowsay` command. In order to reference parameters (e.g., ``"{{inputs.parameters.message}}"``), the parameters must be enclosed in double quotes to escape the curly braces in YAML. + +The argo CLI provides a convenient way to override parameters used to invoke the entrypoint. For example, the following command would bind the `message` parameter to "goodbye world" instead of the default "hello world". + +```bash +argo submit arguments-parameters.yaml -p message="goodbye world" +``` + +In case of multiple parameters that can be overridden, the argo CLI provides a command to load parameters files in YAML or JSON format. Here is an example of that kind of parameter file: + +```yaml +message: goodbye world +``` + +To run use following command: + +```bash +argo submit arguments-parameters.yaml --parameter-file params.yaml +``` + +Command-line parameters can also be used to override the default entrypoint and invoke any template in the workflow spec. 
For example, if you add a new version of the `whalesay` template called `whalesay-caps` but you don't want to change the default entrypoint, you can invoke this from the command line as follows: + +```bash +argo submit arguments-parameters.yaml --entrypoint whalesay-caps +``` + +By using a combination of the `--entrypoint` and `-p` parameters, you can call any template in the workflow spec with any parameter that you like. + +The values set in the `spec.arguments.parameters` are globally scoped and can be accessed via `{{workflow.parameters.parameter_name}}`. This can be useful to pass information to multiple steps in a workflow. For example, if you wanted to run your workflows with different logging levels that are set in the environment of each container, you could have a YAML file similar to this one: + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: global-parameters- +spec: + entrypoint: A + arguments: + parameters: + - name: log-level + value: INFO + + templates: + - name: A + container: + image: containerA + env: + - name: LOG_LEVEL + value: "{{workflow.parameters.log-level}}" + command: [runA] + - name: B + container: + image: containerB + env: + - name: LOG_LEVEL + value: "{{workflow.parameters.log-level}}" + command: [runB] +``` + +In this workflow, both steps `A` and `B` would have the same log-level set to `INFO` and can easily be changed between workflow submissions using the `-p` flag. diff --git a/docs/walk-through/recursion.md b/docs/walk-through/recursion.md new file mode 100644 index 000000000000..759bd5f137ac --- /dev/null +++ b/docs/walk-through/recursion.md @@ -0,0 +1,69 @@ +# Recursion + +Templates can recursively invoke each other! In this variation of the above coin-flip template, we continue to flip coins until it comes up heads. 
+ +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: coinflip-recursive- +spec: + entrypoint: coinflip + templates: + - name: coinflip + steps: + # flip a coin + - - name: flip-coin + template: flip-coin + # evaluate the result in parallel + - - name: heads + template: heads # call heads template if "heads" + when: "{{steps.flip-coin.outputs.result}} == heads" + - name: tails # keep flipping coins if "tails" + template: coinflip + when: "{{steps.flip-coin.outputs.result}} == tails" + + - name: flip-coin + script: + image: python:alpine3.6 + command: [python] + source: | + import random + result = "heads" if random.randint(0,1) == 0 else "tails" + print(result) + + - name: heads + container: + image: alpine:3.6 + command: [sh, -c] + args: ["echo \"it was heads\""] +``` + +Here's the result of a couple of runs of coin-flip for comparison. + +```bash +argo get coinflip-recursive-tzcb5 + +STEP PODNAME MESSAGE + ✔ coinflip-recursive-vhph5 + ├───✔ flip-coin coinflip-recursive-vhph5-2123890397 + └─┬─✔ heads coinflip-recursive-vhph5-128690560 + └─○ tails + +STEP PODNAME MESSAGE + ✔ coinflip-recursive-tzcb5 + ├───✔ flip-coin coinflip-recursive-tzcb5-322836820 + └─┬─○ heads + └─✔ tails + ├───✔ flip-coin coinflip-recursive-tzcb5-1863890320 + └─┬─○ heads + └─✔ tails + ├───✔ flip-coin coinflip-recursive-tzcb5-1768147140 + └─┬─○ heads + └─✔ tails + ├───✔ flip-coin coinflip-recursive-tzcb5-4080411136 + └─┬─✔ heads coinflip-recursive-tzcb5-4080323273 + └─○ tails +``` + +In the first run, the coin immediately comes up heads and we stop. In the second run, the coin comes up tail three times before it finally comes up heads and we stop. 
diff --git a/docs/walk-through/retrying-failed-or-errored-steps.md b/docs/walk-through/retrying-failed-or-errored-steps.md new file mode 100644 index 000000000000..42795b3cf348 --- /dev/null +++ b/docs/walk-through/retrying-failed-or-errored-steps.md @@ -0,0 +1,36 @@ +# Retrying Failed or Errored Steps + +You can specify a `retryStrategy` that will dictate how failed or errored steps are retried: + +```yaml +# This example demonstrates the use of retry back offs +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: retry-backoff- +spec: + entrypoint: retry-backoff + templates: + - name: retry-backoff + retryStrategy: + limit: 10 + retryPolicy: "Always" + backoff: + duration: "1" # Must be a string. Default unit is seconds. Could also be a Duration, e.g.: "2m", "6h", "1d" + factor: 2 + maxDuration: "1m" # Must be a string. Default unit is seconds. Could also be a Duration, e.g.: "2m", "6h", "1d" + affinity: + nodeAntiAffinity: {} + container: + image: python:alpine3.6 + command: ["python", -c] + # fail with a 66% probability + args: ["import random; import sys; exit_code = random.choice([0, 1, 1]); sys.exit(exit_code)"] +``` + +* `limit` is the maximum number of times the container will be retried. +* `retryPolicy` specifies whether a container will be retried on failure, error, both, or only transient errors (e.g. i/o or TLS handshake timeout). `Always` retries on both errors and failures. Also available: `OnFailure` (default), `OnError`, and `OnTransientError` (available after v3.0.0-rc2). +* `backoff` is an exponential back-off. +* `nodeAntiAffinity` prevents running steps on the same host. Current implementation allows only empty `nodeAntiAffinity` (i.e. `nodeAntiAffinity: {}`) and by default it uses label `kubernetes.io/hostname` as the selector. + +Providing an empty `retryStrategy` (i.e. `retryStrategy: {}`) will cause a container to retry until completion. 
diff --git a/docs/walk-through/scripts-and-results.md b/docs/walk-through/scripts-and-results.md new file mode 100644 index 000000000000..07cb9dc2fea6 --- /dev/null +++ b/docs/walk-through/scripts-and-results.md @@ -0,0 +1,60 @@ +# Scripts And Results + +Often, we just want a template that executes a script specified as a here-script (also known as a `here document`) in the workflow spec. This example shows how to do that: + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: scripts-bash- +spec: + entrypoint: bash-script-example + templates: + - name: bash-script-example + steps: + - - name: generate + template: gen-random-int-bash + - - name: print + template: print-message + arguments: + parameters: + - name: message + value: "{{steps.generate.outputs.result}}" # The result of the here-script + + - name: gen-random-int-bash + script: + image: debian:9.4 + command: [bash] + source: | # Contents of the here-script + cat /dev/urandom | od -N2 -An -i | awk -v f=1 -v r=100 '{printf "%i\n", f + r * $1 / 65536}' + + - name: gen-random-int-python + script: + image: python:alpine3.6 + command: [python] + source: | + import random + i = random.randint(1, 100) + print(i) + + - name: gen-random-int-javascript + script: + image: node:9.1-alpine + command: [node] + source: | + var rand = Math.floor(Math.random() * 100); + console.log(rand); + + - name: print-message + inputs: + parameters: + - name: message + container: + image: alpine:latest + command: [sh, -c] + args: ["echo result was: {{inputs.parameters.message}}"] +``` + +The `script` keyword allows the specification of the script body using the `source` tag. This creates a temporary file containing the script body and then passes the name of the temporary file as the final parameter to `command`, which should be an interpreter that executes the script body. 
+ +The use of the `script` feature also assigns the standard output of running the script to a special output parameter named `result`. This allows you to use the result of running the script itself in the rest of the workflow spec. In this example, the result is simply echoed by the print-message template. diff --git a/docs/walk-through/secrets.md b/docs/walk-through/secrets.md new file mode 100644 index 000000000000..e5f67e29a79e --- /dev/null +++ b/docs/walk-through/secrets.md @@ -0,0 +1,40 @@ +# Secrets + +Argo supports the same secrets syntax and mechanisms as Kubernetes Pod specs, which allows access to secrets as environment variables or volume mounts. See the [Kubernetes documentation](https://kubernetes.io/docs/concepts/configuration/secret/) for more information. + +```yaml +# To run this example, first create the secret by running: +# kubectl create secret generic my-secret --from-literal=mypassword=S00perS3cretPa55word +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: secret-example- +spec: + entrypoint: whalesay + # To access secrets as files, add a volume entry in spec.volumes[] and + # then in the container template spec, add a mount using volumeMounts. + volumes: + - name: my-secret-vol + secret: + secretName: my-secret # name of an existing k8s secret + templates: + - name: whalesay + container: + image: alpine:3.7 + command: [sh, -c] + args: [' + echo "secret from env: $MYSECRETPASSWORD"; + echo "secret from file: `cat /secret/mountpath/mypassword`" + '] + # To access secrets as environment variables, use the k8s valueFrom and + # secretKeyRef constructs. 
+ env: + - name: MYSECRETPASSWORD # name of env var + valueFrom: + secretKeyRef: + name: my-secret # name of an existing k8s secret + key: mypassword # 'key' subcomponent of the secret + volumeMounts: + - name: my-secret-vol # mount file containing secret at /secret/mountpath + mountPath: "/secret/mountpath" +``` diff --git a/docs/walk-through/sidecars.md b/docs/walk-through/sidecars.md new file mode 100644 index 000000000000..feebff026966 --- /dev/null +++ b/docs/walk-through/sidecars.md @@ -0,0 +1,26 @@ +# Sidecars + +A sidecar is another container that executes concurrently in the same pod as the main container and is useful in creating multi-container pods. + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: sidecar-nginx- +spec: + entrypoint: sidecar-nginx-example + templates: + - name: sidecar-nginx-example + container: + image: appropriate/curl + command: [sh, -c] + # Try to read from nginx web server until it comes up + args: ["until `curl -G 'http://127.0.0.1/' >& /tmp/out`; do echo sleep && sleep 1; done && cat /tmp/out"] + # Create a simple nginx web server + sidecars: + - name: nginx + image: nginx:1.13 + command: [nginx, -g, daemon off;] +``` + +In the above example, we create a sidecar container that runs Nginx as a simple web server. The order in which containers come up is random, so in this example the main container polls the Nginx container until it is ready to service requests. This is a good design pattern when designing multi-container systems: always wait for any services you need to come up before running your main code. diff --git a/docs/walk-through/steps.md b/docs/walk-through/steps.md new file mode 100644 index 000000000000..1815c794fb71 --- /dev/null +++ b/docs/walk-through/steps.md @@ -0,0 +1,57 @@ +# Steps + +In this example, we'll see how to create multi-step workflows, how to define more than one template in a workflow spec, and how to create nested workflows. 
Be sure to read the comments as they provide useful explanations. + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: steps- +spec: + entrypoint: hello-hello-hello + + # This spec contains two templates: hello-hello-hello and whalesay + templates: + - name: hello-hello-hello + # Instead of just running a container + # This template has a sequence of steps + steps: + - - name: hello1 # hello1 is run before the following steps + template: whalesay + arguments: + parameters: + - name: message + value: "hello1" + - - name: hello2a # double dash => run after previous step + template: whalesay + arguments: + parameters: + - name: message + value: "hello2a" + - name: hello2b # single dash => run in parallel with previous step + template: whalesay + arguments: + parameters: + - name: message + value: "hello2b" + + # This is the same template as from the previous example + - name: whalesay + inputs: + parameters: + - name: message + container: + image: docker/whalesay + command: [cowsay] + args: ["{{inputs.parameters.message}}"] +``` + +The above workflow spec prints three different flavors of "hello". The `hello-hello-hello` template consists of three `steps`. The first step named `hello1` will be run in sequence whereas the next two steps named `hello2a` and `hello2b` will be run in parallel with each other. Using the argo CLI command, we can graphically display the execution history of this workflow spec, which shows that the steps named `hello2a` and `hello2b` ran in parallel with each other. 
+ +```bash +STEP TEMPLATE PODNAME DURATION MESSAGE + ✔ steps-z2zdn hello-hello-hello + ├───✔ hello1 whalesay steps-z2zdn-27420706 2s + └─┬─✔ hello2a whalesay steps-z2zdn-2006760091 3s + └─✔ hello2b whalesay steps-z2zdn-2023537710 3s +``` diff --git a/docs/walk-through/suspending.md b/docs/walk-through/suspending.md new file mode 100644 index 000000000000..53b957c108aa --- /dev/null +++ b/docs/walk-through/suspending.md @@ -0,0 +1,50 @@ +# Suspending + +Workflows can be suspended by + +```bash +argo suspend WORKFLOW +``` + +Or by specifying a `suspend` step on the workflow: + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: suspend-template- +spec: + entrypoint: suspend + templates: + - name: suspend + steps: + - - name: build + template: whalesay + - - name: approve + template: approve + - - name: delay + template: delay + - - name: release + template: whalesay + + - name: approve + suspend: {} + + - name: delay + suspend: + duration: "20" # Must be a string. Default unit is seconds. Could also be a Duration, e.g.: "2m", "6h", "1d" + + - name: whalesay + container: + image: docker/whalesay + command: [cowsay] + args: ["hello world"] +``` + +Once suspended, a Workflow will not schedule any new steps until it is resumed. It can be resumed manually by + +```bash +argo resume WORKFLOW +``` + +Or automatically with a `duration` limit as the example above. diff --git a/docs/walk-through/the-structure-of-workflow-specs.md b/docs/walk-through/the-structure-of-workflow-specs.md new file mode 100644 index 000000000000..2fa0129d8e6a --- /dev/null +++ b/docs/walk-through/the-structure-of-workflow-specs.md @@ -0,0 +1,19 @@ +# The Structure of Workflow Specs + +We now know enough about the basic components of a workflow spec. 
To review its basic structure: + +- Kubernetes header including meta-data +- Spec body + - Entrypoint invocation with optional arguments + - List of template definitions + +- For each template definition + - Name of the template + - Optionally a list of inputs + - Optionally a list of outputs + - Container invocation (leaf template) or a list of steps + - For each step, a template invocation + +To summarize, workflow specs are composed of a set of Argo templates where each template consists of an optional input section, an optional output section and either a container invocation or a list of steps where each step invokes another template. + +Note that the container section of the workflow spec will accept the same options as the container section of a pod spec, including but not limited to environment variables, secrets, and volume mounts. Similarly, for volume claims and volumes. diff --git a/docs/walk-through/timeouts.md b/docs/walk-through/timeouts.md new file mode 100644 index 000000000000..32076f147ca0 --- /dev/null +++ b/docs/walk-through/timeouts.md @@ -0,0 +1,20 @@ +# Timeouts + +To limit the elapsed time for a workflow, you can set the variable `activeDeadlineSeconds`. + +```yaml +# To enforce a timeout for a container template, specify a value for activeDeadlineSeconds. +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: timeouts- +spec: + entrypoint: sleep + templates: + - name: sleep + container: + image: alpine:latest + command: [sh, -c] + args: ["echo sleeping for 1m; sleep 60; echo done"] + activeDeadlineSeconds: 10 # terminate container template after 10 seconds +``` diff --git a/docs/walk-through/volumes.md b/docs/walk-through/volumes.md new file mode 100644 index 000000000000..87d3224fffae --- /dev/null +++ b/docs/walk-through/volumes.md @@ -0,0 +1,196 @@ +# Volumes + +The following example dynamically creates a volume and then uses the volume in a two step workflow. 
+ +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: volumes-pvc- +spec: + entrypoint: volumes-pvc-example + volumeClaimTemplates: # define volume, same syntax as k8s Pod spec + - metadata: + name: workdir # name of volume claim + spec: + accessModes: [ "ReadWriteOnce" ] + resources: + requests: + storage: 1Gi # Gi => 1024 * 1024 * 1024 + + templates: + - name: volumes-pvc-example + steps: + - - name: generate + template: whalesay + - - name: print + template: print-message + + - name: whalesay + container: + image: docker/whalesay:latest + command: [sh, -c] + args: ["echo generating message in volume; cowsay hello world | tee /mnt/vol/hello_world.txt"] + # Mount workdir volume at /mnt/vol before invoking docker/whalesay + volumeMounts: # same syntax as k8s Pod spec + - name: workdir + mountPath: /mnt/vol + + - name: print-message + container: + image: alpine:latest + command: [sh, -c] + args: ["echo getting message from volume; find /mnt/vol; cat /mnt/vol/hello_world.txt"] + # Mount workdir volume at /mnt/vol before invoking docker/whalesay + volumeMounts: # same syntax as k8s Pod spec + - name: workdir + mountPath: /mnt/vol + +``` + +Volumes are a very useful way to move large amounts of data from one step in a workflow to another. Depending on the system, some volumes may be accessible concurrently from multiple steps. + +In some cases, you want to access an already existing volume rather than creating/destroying one dynamically. 
+ +```yaml +# Define Kubernetes PVC +kind: PersistentVolumeClaim +apiVersion: v1 +metadata: + name: my-existing-volume +spec: + accessModes: [ "ReadWriteOnce" ] + resources: + requests: + storage: 1Gi + +--- +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: volumes-existing- +spec: + entrypoint: volumes-existing-example + volumes: + # Pass my-existing-volume as an argument to the volumes-existing-example template + # Same syntax as k8s Pod spec + - name: workdir + persistentVolumeClaim: + claimName: my-existing-volume + + templates: + - name: volumes-existing-example + steps: + - - name: generate + template: whalesay + - - name: print + template: print-message + + - name: whalesay + container: + image: docker/whalesay:latest + command: [sh, -c] + args: ["echo generating message in volume; cowsay hello world | tee /mnt/vol/hello_world.txt"] + volumeMounts: + - name: workdir + mountPath: /mnt/vol + + - name: print-message + container: + image: alpine:latest + command: [sh, -c] + args: ["echo getting message from volume; find /mnt/vol; cat /mnt/vol/hello_world.txt"] + volumeMounts: + - name: workdir + mountPath: /mnt/vol +``` + +It's also possible to declare existing volumes at the template level, instead of the workflow level. +Workflows can generate volumes using a `resource` step. + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: template-level-volume- +spec: + entrypoint: generate-and-use-volume + templates: + - name: generate-and-use-volume + steps: + - - name: generate-volume + template: generate-volume + arguments: + parameters: + - name: pvc-size + # In a real-world example, this could be generated by a previous workflow step. 
+ value: '1Gi' + - - name: generate + template: whalesay + arguments: + parameters: + - name: pvc-name + value: '{{steps.generate-volume.outputs.parameters.pvc-name}}' + - - name: print + template: print-message + arguments: + parameters: + - name: pvc-name + value: '{{steps.generate-volume.outputs.parameters.pvc-name}}' + + - name: generate-volume + inputs: + parameters: + - name: pvc-size + resource: + action: create + setOwnerReference: true + manifest: | + apiVersion: v1 + kind: PersistentVolumeClaim + metadata: + generateName: pvc-example- + spec: + accessModes: ['ReadWriteOnce', 'ReadOnlyMany'] + resources: + requests: + storage: '{{inputs.parameters.pvc-size}}' + outputs: + parameters: + - name: pvc-name + valueFrom: + jsonPath: '{.metadata.name}' + + - name: whalesay + inputs: + parameters: + - name: pvc-name + volumes: + - name: workdir + persistentVolumeClaim: + claimName: '{{inputs.parameters.pvc-name}}' + container: + image: docker/whalesay:latest + command: [sh, -c] + args: ["echo generating message in volume; cowsay hello world | tee /mnt/vol/hello_world.txt"] + volumeMounts: + - name: workdir + mountPath: /mnt/vol + + - name: print-message + inputs: + parameters: + - name: pvc-name + volumes: + - name: workdir + persistentVolumeClaim: + claimName: '{{inputs.parameters.pvc-name}}' + container: + image: alpine:latest + command: [sh, -c] + args: ["echo getting message from volume; find /mnt/vol; cat /mnt/vol/hello_world.txt"] + volumeMounts: + - name: workdir + mountPath: /mnt/vol + +``` diff --git a/docs/webhooks.md b/docs/webhooks.md index 42d8418ab096..01947ab1f11f 100644 --- a/docs/webhooks.md +++ b/docs/webhooks.md @@ -1,6 +1,5 @@ # Webhooks - > v2.11 and after Many clients can send events via the [events](events.md) API endpoint using a standard authorization header. However, for clients that are unable to do so (e.g. because they use signature verification as proof of origin), additional configuration is required. 
@@ -8,15 +7,14 @@ Many clients can send events via the [events](events.md) API endpoint using a st In the namespace that will receive the event, create [access token](access-token.md) resources for your client: * A role with permissions to get workflow templates and to create a workflow: [example](https://raw.githubusercontent.com/argoproj/argo-workflows/master/manifests/quick-start/base/webhooks/submit-workflow-template-role.yaml) -* A service account for the client: [example](https://raw.githubusercontent.com/argoproj/argo-workflows/master/manifests/quick-start/base/webhooks/github.com-sa.yaml). +* A service account for the client: [example](https://raw.githubusercontent.com/argoproj/argo-workflows/master/manifests/quick-start/base/webhooks/github.com-sa.yaml). * A binding of the account to the role: [example](https://raw.githubusercontent.com/argoproj/argo-workflows/master/manifests/quick-start/base/webhooks/github.com-rolebinding.yaml) Additionally create: -* A secret named "argo-workflows-webhook-clients" listing the service accounts: [example](https://raw.githubusercontent.com/argoproj/argo-workflows/master/manifests/quick-start/base/webhooks/argo-workflows-webhook-clients-secret.yaml) - -The secret "argo-workflows-webhook-clients" tells Argo: +* A secret named `argo-workflows-webhook-clients` listing the service accounts: [example](https://raw.githubusercontent.com/argoproj/argo-workflows/master/manifests/quick-start/base/webhooks/argo-workflows-webhook-clients-secret.yaml) -* What type of webhook the account can be used for, e.g. "github" -* What "secret" that webhook is configured for, e.g. in your [Github settings page](https://github.com/alexec/argo/settings/hooks) +The secret `argo-workflows-webhook-clients` tells Argo: +* What type of webhook the account can be used for, e.g. `github`. +* What "secret" that webhook is configured for, e.g. in your Github settings page. 
diff --git a/docs/widgets.md b/docs/widgets.md index b5ff4d5bfaf0..475c57af19b7 100644 --- a/docs/widgets.md +++ b/docs/widgets.md @@ -2,7 +2,7 @@ > v3.0 and after -Widgets are intended to be embedded into other applications using iframes. This is may not work with your configuration. You may need to: +Widgets are intended to be embedded into other applications using inline frames (`iframe`). This may not work with your configuration. You may need to: * Run the Argo Server with an account that can read workflows. That can be done using `--auth-mode=server` and configuring the `argo-server` service account. * Run the Argo Server with `--x-frame-options=SAMEORIGIN` or `--x-frame-options=`. diff --git a/docs/windows.md b/docs/windows.md index 7b124d0aa119..1086b8015d5a 100644 --- a/docs/windows.md +++ b/docs/windows.md @@ -3,9 +3,10 @@ The Argo server and the workflow controller currently only run on Linux. The workflow executor however also runs on Windows nodes, meaning you can use Windows containers inside your workflows! Here are the steps to get started. ## Requirements + * Kubernetes 1.14 or later, supporting Windows nodes * Hybrid cluster containing Linux and Windows nodes like described in the [Kubernetes docs](https://kubernetes.io/docs/setup/production-environment/windows/user-guide-windows-containers/) -* Argo configured and running like described [here](quick-start.md) +* Argo configured and running like described [here](quick-start.md) ## Schedule workflows with Windows containers @@ -29,7 +30,8 @@ spec: ``` You can run this example and get the logs: -``` + +```bash $ argo submit --watch https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/hello-windows.yaml $ argo logs hello-windows-s9kk5 hello-windows-s9kk5: "Hello from Windows Container!" @@ -37,9 +39,10 @@ hello-windows-s9kk5: "Hello from Windows Container!" ## Schedule hybrid workflows -You can also run different steps on different host OSs. 
This can for example be very helpful when you need to compile your application on Windows and Linux. +You can also run different steps on different host operating systems. This can for example be very helpful when you need to compile your application on Windows and Linux. An example workflow can look like the following: + ```yaml apiVersion: argoproj.io/v1alpha1 kind: Workflow @@ -69,11 +72,11 @@ spec: image: alpine command: [echo] args: ["Hello from Linux Container!"] - ``` Again, you can run this example and get the logs: -``` + +```bash $ argo submit --watch https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/hello-hybrid.yaml $ argo logs hello-hybrid-plqpp hello-hybrid-plqpp-1977432187: "Hello from Windows Container!" @@ -105,16 +108,16 @@ Remember that [volume mounts on Windows can only target a directory](https://kub ## Limitations -- Sharing process namespaces [doesn't work on Windows](https://kubernetes.io/docs/setup/production-environment/windows/intro-windows-in-kubernetes/#v1-pod) so you can't use the Process Namespace Sharing (pns) workflow executor. -- The argoexec Windows container is built using [nanoserver:1809](https://github.com/argoproj/argo-workflows/blob/b18b9920f678f420552864eccf3d4b98f3604cfa/Dockerfile.windows#L28) as the base image. Running a newer windows version (e.g. 1909) is currently [not confirmed to be working](https://github.com/argoproj/argo-workflows/issues/5376). If this is required, you need to build the argoexec container yourself by first adjusting the base image. +* Sharing process namespaces [doesn't work on Windows](https://kubernetes.io/docs/setup/production-environment/windows/intro-windows-in-kubernetes/#v1-pod) so you can't use the Process Namespace Sharing (PNS) workflow executor. +* The executor Windows container is built using [Nano Server](https://github.com/argoproj/argo-workflows/blob/b18b9920f678f420552864eccf3d4b98f3604cfa/Dockerfile.windows#L28) as the base image. 
Running a newer windows version (e.g. 1909) is currently [not confirmed to be working](https://github.com/argoproj/argo-workflows/issues/5376). If this is required, you need to build the executor container yourself by first adjusting the base image. ## Building the workflow executor image for Windows To build the workflow executor image for Windows you need a Windows machine running Windows Server 2019 with Docker installed like described [in the docs](https://docs.docker.com/ee/docker-ee/windows/docker-ee/#install-docker-engine---enterprise). -You then clone the project and run the Docker build with the Dockerfile for Windows and `argoexec` as a target: +You then clone the project and run the Docker build with the `Dockerfile` for Windows and `argoexec` as a target: -``` +```bash git clone https://github.com/argoproj/argo-workflows.git cd argo docker build -t myargoexec -f .\Dockerfile.windows --target argoexec . diff --git a/docs/work-avoidance.md b/docs/work-avoidance.md index 083573ef8524..a2d99a6bb5ec 100644 --- a/docs/work-avoidance.md +++ b/docs/work-avoidance.md @@ -1,7 +1,5 @@ # Work Avoidance -![GA](assets/ga.svg) - > v2.9 and after You can make workflows faster and more robust by employing **work avoidance**. A workflow that utilizes this is simply a workflow containing steps that do not run if the work has already been done. This simplest way to do this is to use **marker files**. @@ -9,9 +7,9 @@ You can make workflows faster and more robust by employing **work avoidance**. A Use cases: * An expensive step appears across multiple workflows - you want to avoid repeating them. -* A workflow has unreliable tasks - you want to be able resubmit the workflow. +* A workflow has unreliable tasks - you want to be able to resubmit the workflow. 
-A **marker file** is a file on that indicates the work has already been done, before doing the work you check to see if the marker has already been done: +A **marker file** is a file that indicates the work has already been done. Before doing the work, you check whether the marker file already exists: ```sh if [ -e /work/markers/name-of-task ]; then @@ -21,18 +19,18 @@ fi echo "working very hard" touch /work/markers/name-of-task ``` - + Choose a name for the file that is unique for the task, e.g. the template name and all the parameters: ```sh touch /work/markers/$(date +%Y-%m-%d)-echo-{{inputs.parameters.num}} -``` - -You need to store the marker files between workflows and this can be achieved using [a PVC](fields.md#persistentvolumeclaim) and [optional input artifact](fields.md#artifact). +``` + +You need to store the marker files between workflows and this can be achieved using [a PVC](fields.md#persistentvolumeclaim) and [optional input artifact](fields.md#artifact). [This complete work avoidance example](https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/work-avoidance.yaml) has the following: * A PVC to store the markers on. * A `load-markers` step that loads the marker files from artifact storage. * Multiple `echo` tasks that avoid work using marker files. -* A `save-markers` exit handler to save the marker files, even if they are not needed. +* A `save-markers` exit handler to save the marker files, even if they are not needed. diff --git a/docs/workflow-archive.md b/docs/workflow-archive.md index 3e295295f8b2..e76fb1a95c2f 100644 --- a/docs/workflow-archive.md +++ b/docs/workflow-archive.md @@ -1,10 +1,8 @@ # Workflow Archive -![GA](assets/ga.svg) - > v2.5 and after -For many uses, you may wish to keep workflows for a long time. Argo can save completed workflows to an SQL database. +For many uses, you may wish to keep workflows for a long time. Argo can save completed workflows to an SQL database. 
To enable this feature, configure a Postgres or MySQL (>= 5.7.8) database under `persistence` in [your configuration](workflow-controller-configmap.yaml) and set `archive: true`. @@ -12,15 +10,16 @@ Be aware that this feature will only archive the statuses of the workflows (whic However, the logs of each pod will NOT be archived. If you need to access the logs of the pods, you need to setup [an artifact repository](artifact-repository-ref.md) thanks to [this doc](configure-artifact-repository.md). -In addition the table specified in the configmap above, the following tables are created when enabling archiving: +In addition to the table specified in the config map above, the following tables are created when enabling archiving: -* argo_archived_workflows -* argo_archived_workflows_labels -* schema_history +* `argo_archived_workflows` +* `argo_archived_workflows_labels` +* `schema_history` The database migration will only occur successfully if none of the tables exist. If a partial set of the tables exist, the database migration may fail and the Argo workflow-controller pod may fail to start. If this occurs delete all of the tables and try restarting the deployment. ## Required database permissions ### Postgres + The database user/role needs to have `CREATE` and `USAGE` permissions on the `public` schema of the database so that the necessary table can be generated during the migration. diff --git a/docs/workflow-concepts.md b/docs/workflow-concepts.md index ee7839a6424c..e7724a82c829 100644 --- a/docs/workflow-concepts.md +++ b/docs/workflow-concepts.md @@ -45,9 +45,10 @@ These templates _define_ work to be done, usually in a Container. ##### [Container](fields.md#container) -Perhaps the most common template type, it will schedule a Container. 
The spec of the template is the same as the [K8s container spec](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.19/#container-v1-core), so you can define a container here the same way you do anywhere else in K8s. - +Perhaps the most common template type, it will schedule a Container. The spec of the template is the same as the [Kubernetes container spec](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.19/#container-v1-core), so you can define a container here the same way you do anywhere else in Kubernetes. + Example: + ```yaml - name: whalesay container: @@ -59,9 +60,10 @@ Example: ##### [Script](fields.md#scripttemplate) A convenience wrapper around a `container`. The spec is the same as a container, but adds the `source:` field which allows you to define a script in-place. -The script will be saved into a file and executed for you. The result of the script is automatically exported into an [Argo variable](./variables.md) either `{{tasks..outputs.result}}` or `{{steps..outputs.result}}`, depending how it was called. - +The script will be saved into a file and executed for you. The result of the script is automatically exported into an [Argo variable](./variables.md) either `{{tasks..outputs.result}}` or `{{steps..outputs.result}}`, depending how it was called. + Example: + ```yaml - name: gen-random-int script: @@ -76,8 +78,9 @@ Example: ##### [Resource](fields.md#resourcetemplate) Performs operations on cluster Resources directly. It can be used to get, create, apply, delete, replace, or patch resources on your cluster. - + This example creates a `ConfigMap` resource on the cluster: + ```yaml - name: k8s-owner-reference resource: @@ -94,8 +97,9 @@ This example creates a `ConfigMap` resource on the cluster: ##### [Suspend](fields.md#suspendtemplate) A suspend template will suspend execution, either for a duration or until it is resumed manually. 
Suspend templates can be resumed from the CLI (with `argo resume`), the API endpoint, or the UI. - + Example: + ```yaml - name: delay suspend: @@ -109,8 +113,9 @@ These templates are used to invoke/call other templates and provide execution co ##### [Steps](fields.md#workflowstep) A steps template allows you to define your tasks in a series of steps. The structure of the template is a "list of lists". Outer lists will run sequentially and inner lists will run in parallel. If you want to run inner lists one by one, use the [Synchronization](fields.md#synchronization) feature. You can set a wide array of options to control execution, such as [`when:` clauses to conditionally execute a step](https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/coinflip.yaml). - + In this example `step1` runs first. Once it is completed, `step2a` and `step2b` will run in parallel: + ```yaml - name: hello-hello-hello steps: @@ -125,8 +130,9 @@ In this example `step1` runs first. Once it is completed, `step2a` and `step2b` ##### [DAG](fields.md#dagtemplate) A dag template allows you to define your tasks as a graph of dependencies. In a DAG, you list all your tasks and set which other tasks must complete before a particular task can begin. Tasks without any dependencies will be run immediately. - + In this example `A` runs first. Once it is completed, `B` and `C` will run in parallel and once they both complete, `D` will run: + ```yaml - name: diamond dag: diff --git a/docs/workflow-controller-configmap.md b/docs/workflow-controller-configmap.md index ceb278190cd5..b1d9aef6ab43 100644 --- a/docs/workflow-controller-configmap.md +++ b/docs/workflow-controller-configmap.md @@ -1,87 +1,11 @@ -# Workflow Controller Configmap +# Workflow Controller Config Map ## Introduction -The Workflow Controller Configmap is used to set controller-wide settings. +The Workflow Controller Config Map is used to set controller-wide settings. 
For a detailed example, please see [`workflow-controller-configmap.yaml`](./workflow-controller-configmap.yaml). -## Setting the Configmap - -The configmap should be saved as a K8s Configmap on the cluster in the same namespace as the `workflow-controller`. -It should then be referenced by the `workflow-controller` and `argo-server` as a command argument: - -```yaml -apiVersion: apps/v1 -kind: Deployment -metadata: - name: workflow-controller -spec: - selector: - matchLabels: - app: workflow-controller - template: - metadata: - labels: - app: workflow-controller - spec: - containers: - - args: - - --configmap - - workflow-controller-configmap # Set configmap name here - - --executor-image - - argoproj/argoexec:latest - - --namespaced - command: - - workflow-controller - image: argoproj/workflow-controller:latest - name: workflow-controller - serviceAccountName: argo - nodeSelector: - kubernetes.io/os: linux -``` -```yaml -apiVersion: apps/v1 -kind: Deployment -metadata: - name: argo-server -spec: - selector: - matchLabels: - app: argo-server - template: - metadata: - labels: - app: argo-server - spec: - containers: - - args: - - server - - --configmap - - workflow-controller-configmap # Set configmap name here - image: argoproj/argocli:latest - name: argo-server - ports: - - containerPort: 2746 - name: web - readinessProbe: - httpGet: - path: / - port: 2746 - scheme: HTTP - initialDelaySeconds: 10 - periodSeconds: 20 - securityContext: - capabilities: - drop: - - ALL - volumeMounts: - - mountPath: /tmp - name: tmp - nodeSelector: - kubernetes.io/os: linux -``` - ## Alternate Structure In all versions, the configuration may be under a `config: |` key: diff --git a/docs/workflow-controller-configmap.yaml b/docs/workflow-controller-configmap.yaml index 044d4d6a05e6..c9567585cac3 100644 --- a/docs/workflow-controller-configmap.yaml +++ b/docs/workflow-controller-configmap.yaml @@ -146,6 +146,7 @@ data: # Specifies the container runtime interface to use (default: 
emissary) # must be one of: docker, kubelet, k8sapi, pns, emissary # It has lower precedence than either `--container-runtime-executor` and `containerRuntimeExecutors`. + # (removed in v3.4) containerRuntimeExecutor: emissary # Specifies the executor to use. @@ -158,6 +159,7 @@ data: # # The list is in order of precedence; the first matching executor is used. # This has precedence over `containerRuntimeExecutor`. + # (removed in v3.4) containerRuntimeExecutors: | - name: emissary selector: @@ -169,26 +171,24 @@ data: workflows.argoproj.io/container-runtime-executor: pns # Specifies the location of docker.sock on the host for docker executor (default: /var/run/docker.sock) - # (available since Argo v2.4) + # (available v2.4-v3.3) dockerSockPath: /var/someplace/else/docker.sock - # kubelet port when using kubelet executor (default: 10250) + # kubelet port when using kubelet executor (default: 10250) (kubelet executor will be deprecated use emissary instead) + # (removed in v3.4) kubeletPort: 10250 # disable the TLS verification of the kubelet executor (default: false) + # (removed in v3.4) kubeletInsecure: false # The command/args for each image, needed when the command is not specified and the emissary executor is used. # https://argoproj.github.io/argo-workflows/workflow-executors/#emissary-emissary images: | - argoproj/argosay:v1: - command: [cowsay] argoproj/argosay:v2: - command: [/argosay] + cmd: [/argosay] docker/whalesay:latest: - command: [cowsay] - python:alpine3.6: - command: [python3] + cmd: [/bin/bash] # Defaults for main containers. These can be overridden by the template. # <= v3.3 only `resources` are supported. 
diff --git a/docs/workflow-creator.md b/docs/workflow-creator.md index bc5809d4ade7..6ad7cd6b4016 100644 --- a/docs/workflow-creator.md +++ b/docs/workflow-creator.md @@ -1,10 +1,8 @@ # Workflow Creator -![GA](assets/ga.svg) - > v2.9 and after -If you create your workflow via the CLI or UI, an attempt will be made to label it with the user who created it +If you create your workflow via the CLI or UI, an attempt will be made to label it with the user who created it ```yaml apiVersion: argoproj.io/v1alpha1 @@ -15,8 +13,8 @@ metadata: workflows.argoproj.io/creator: admin # labels must be DNS formatted, so the "@" is replaces by '.at.' workflows.argoproj.io/creator-email: admin.at.your.org -``` + workflows.argoproj.io/creator-preferred-username: admin-preferred-username +``` !!! NOTE Labels only contain `[-_.0-9a-zA-Z]`, so any other characters will be turned into `-`. - \ No newline at end of file diff --git a/docs/workflow-events.md b/docs/workflow-events.md index d55b8ec3c250..0e906b8487b0 100644 --- a/docs/workflow-events.md +++ b/docs/workflow-events.md @@ -1,9 +1,9 @@ # Workflow Events -![GA](assets/ga.svg) - > v2.7.2 +⚠️ Do not use Kubernetes events for automation. Events may be lost or rolled up. + We emit Kubernetes events on certain events. Workflow state change: @@ -20,7 +20,6 @@ Node state change: * `WorkflowNodeFailed` * `WorkflowNodeError` - The involved object is the workflow in both cases. Additionally, for node state change events, annotations indicate the name and type of the involved node: ```yaml diff --git a/docs/workflow-executors.md b/docs/workflow-executors.md index a0d276a3dbfc..96b3ddcd0640 100644 --- a/docs/workflow-executors.md +++ b/docs/workflow-executors.md @@ -1,14 +1,14 @@ # Workflow Executors -A workflow executor is a process that conforms to a specific interface that allows Argo to perform certain actions like monitoring pod logs, collecting artifacts, managing container lifecycles, etc.. 
+A workflow executor is a process that conforms to a specific interface that allows Argo to perform certain actions like monitoring pod logs, collecting artifacts, managing container life-cycles, etc.. -The executor to be used in your workflows can be changed in [the configmap](./workflow-controller-configmap.yaml) under the `containerRuntimeExecutor` key. +The executor to be used in your workflows can be changed in [the config map](./workflow-controller-configmap.yaml) under the `containerRuntimeExecutor` key. ## Emissary (emissary) > v3.1 and after -**default in >= v3.3** +Default in >= v3.3. This is the most fully featured executor. @@ -27,7 +27,7 @@ This is the most fully featured executor. * Configuration: * `command` must be specified for containers. -You can determine the command and args as follows: +You can determine values as follows: ```bash docker image inspect -f '{{.Config.Entrypoint}} {{.Config.Cmd}}' argoproj/argosay:v2 @@ -45,12 +45,11 @@ a [configuration item](workflow-controller-configmap.yaml). The emissary will exit with code 64 if it fails. This may indicate a bug in the emissary. +## Docker (docker) -## Docker (docker) +⚠️Deprecated. Removed in v3.4. -⚠️Deprecated. - -**default in <= v3.2** +Default in <= v3.2. * Least secure: * It requires `privileged` access to `docker.sock` of the host to be mounted which. Often rejected by Open Policy Agent (OPA) or your Pod Security Policy (PSP). @@ -67,6 +66,8 @@ The emissary will exit with code 64 if it fails. This may indicate a bug in the ## Kubelet (kubelet) +⚠️Deprecated. Removed in v3.4. + * Secure * No `privileged` access * Cannot escape the privileges of the pod's service account @@ -74,15 +75,15 @@ The emissary will exit with code 64 if it fails. This may indicate a bug in the * Scalable: * Operations performed against the local Kubelet * Artifacts: - * Output artifacts must be saved on volumes (e.g. [emptyDir](empty-dir.md)) and not the base image layer (e.g. 
`/tmp`) + * Output artifacts must be saved on volumes (e.g. [empty-dir](empty-dir.md)) and not the base image layer (e.g. `/tmp`) * Step/Task result: * Warnings that normally goes to stderr will get captured in a step or a dag task's `outputs.result`. May require changes if your pipeline is conditioned on `steps/tasks.name.outputs.result` * Configuration: * Additional Kubelet configuration maybe needed -## Kubernetes API (k8sapi) +## Kubernetes API (`k8sapi`) -⚠️Deprecated. +⚠️Deprecated. Removed in v3.4. * Reliability: * Works on GKE Autopilot @@ -93,18 +94,20 @@ The emissary will exit with code 64 if it fails. This may indicate a bug in the * Least scalable: * Log retrieval and container operations performed against the remote Kubernetes API * Artifacts: - * Output artifacts must be saved on volumes (e.g. [emptyDir](empty-dir.md)) and not the base image layer (e.g. `/tmp`) + * Output artifacts must be saved on volumes (e.g. [empty-dir](empty-dir.md)) and not the base image layer (e.g. `/tmp`) * Step/Task result: * Warnings that normally goes to stderr will get captured in a step or a dag task's `outputs.result`. May require changes if your pipeline is conditioned on `steps/tasks.name.outputs.result` * Configuration: * No additional configuration needed. -## Process Namespace Sharing (pns) +## Process Namespace Sharing (`pns`) + +⚠️Deprecated. Removed in v3.4. * More secure: * No `privileged` access * cannot escape the privileges of the pod's service account - * Can [`runAsNonRoot`](workflow-pod-security-context.md), if you use volumes (e.g. [emptyDir](empty-dir.md)) for your output artifacts + * Can [`runAsNonRoot`](workflow-pod-security-context.md), if you use volumes (e.g. [empty-dir](empty-dir.md)) for your output artifacts * Processes are visible to other containers in the pod. This includes all information visible in /proc, such as passwords that were passed as arguments or environment variables. These are protected only by regular Unix permissions. 
* Scalable: * Most operations use local `procfs`. @@ -118,4 +121,4 @@ The emissary will exit with code 64 if it fails. This may indicate a bug in the * Process will no longer run with PID 1 * [Doesn't work for Windows containers](https://kubernetes.io/docs/setup/production-environment/windows/intro-windows-in-kubernetes/#v1-pod). -[https://kubernetes.io/docs/tasks/configure-pod-container/share-process-namespace/](https://kubernetes.io/docs/tasks/configure-pod-container/share-process-namespace/) +[Learn more](https://kubernetes.io/docs/tasks/configure-pod-container/share-process-namespace/) diff --git a/docs/workflow-inputs.md b/docs/workflow-inputs.md index 1152badd938b..989d94885570 100644 --- a/docs/workflow-inputs.md +++ b/docs/workflow-inputs.md @@ -1,7 +1,5 @@ # Workflow Inputs -![GA](assets/ga.svg) - ## Introduction `Workflows` and `template`s operate on a set of defined parameters and arguments that are supplied to the running container. The precise details of how to manage the inputs can be confusing; this article attempts to clarify concepts and provide simple working examples to illustrate the various configuration options. @@ -10,26 +8,29 @@ The examples below are limited to `DAGTemplate`s and mainly focused on `paramete ### Parameter Inputs -First, some clarification of terms is needed. For a glossary reference, see [Argo Core Concepts](https://argoproj.github.io/argo-workflows/core-concepts/). +First, some clarification of terms is needed. For a glossary reference, see [Argo Core Concepts](workflow-concepts.md). A `workflow` provides `arguments`, which are passed in to the entry point template. A `template` defines `inputs` which are then provided by template callers (such as `steps`, `dag`, or even a `workflow`). The structure of both is identical. 
For example, in a `Workflow`, one parameter would look like this: -``` + +```yaml arguments: parameters: - name: workflow-param-1 ``` And in a `template`: -``` + +```yaml inputs: parameters: - name: template-param-1 ``` Inputs to `DAGTemplate`s use the `arguments` format: -``` + +```yaml dag: tasks: - name: step-A @@ -41,7 +42,8 @@ dag: ``` Previous examples in context: -``` + +```yaml apiVersion: argoproj.io/v1alpha1 kind: Workflow metadata: @@ -76,8 +78,10 @@ spec: To run this example: `argo submit -n argo example.yaml -p 'workflow-param-1="abcd"' --watch` ### Using Previous Step Outputs As Inputs + In `DAGTemplate`s, it is common to want to take the output of one step and send it as the input to another step. However, there is a difference in how this works for artifacts vs parameters. Suppose our `step-template-A` defines some outputs: -``` + +```yaml outputs: parameters: - name: output-param-1 @@ -89,7 +93,8 @@ outputs: ``` In my `DAGTemplate`, I can send these outputs to another template like this: -``` + +```yaml dag: tasks: - name: step-A diff --git a/docs/workflow-notifications.md b/docs/workflow-notifications.md index 7ae446fd23f0..c8ea68413e09 100644 --- a/docs/workflow-notifications.md +++ b/docs/workflow-notifications.md @@ -10,4 +10,4 @@ You have options: 1. For individual workflows, can add an exit handler to your workflow, [for example](https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/exit-handlers.yaml). 1. If you want the same for every workflow, you can add an exit handler to [the default workflow spec](default-workflow-specs.md). -1. Use a service (e.g. [Heptio Labs EventRouter](https://github.com/heptiolabs/eventrouter)) to the [Workflow events](workflow-events.md) we emit. \ No newline at end of file +1. Use a service (e.g. [Heptio Labs EventRouter](https://github.com/heptiolabs/eventrouter)) to the [Workflow events](workflow-events.md) we emit. 
diff --git a/docs/workflow-of-workflows.md b/docs/workflow-of-workflows.md index 4968df817fa2..075a896ed063 100644 --- a/docs/workflow-of-workflows.md +++ b/docs/workflow-of-workflows.md @@ -3,10 +3,13 @@ > v2.9 and after ## Introduction -The Workflow of Workflows pattern involves a parent workflow triggering one or more child workflows, managing them, and acting their results. - + +The Workflow of Workflows pattern involves a parent workflow triggering one or more child workflows, managing them, and acting on their results. + ## Examples + You can use `workflowTemplateRef` to trigger a workflow inline. + 1. Define your workflow as a `workflowtemplate`. ```yaml @@ -30,11 +33,13 @@ spec: command: [cowsay] args: ["{{inputs.parameters.message}}"] ``` -2. Create the `Workflowtemplate` in cluster using `argo template create ` -3. Define the workflow of workflows. + +1. Create the `Workflowtemplate` in cluster using `argo template create ` +2. Define the workflow of workflows. + ```yaml # This template demonstrates a workflow of workflows. -# Workflow triggers one or more workflow and manage it. +# Workflow triggers one or more workflows and manages them. apiVersion: argoproj.io/v1alpha1 kind: Workflow metadata: diff --git a/docs/workflow-pod-security-context.md b/docs/workflow-pod-security-context.md index 92413fcf65e7..1247f0304bbb 100644 --- a/docs/workflow-pod-security-context.md +++ b/docs/workflow-pod-security-context.md @@ -1,6 +1,6 @@ # Workflow Pod Security Context -By default, a workflow pods run as root. The Docker executor even requires `privileged: true`. +By default, all workflow pods run as root. The Docker executor even requires `privileged: true`. For other [workflow executors](workflow-executors.md), you can run your workflow pods more securely by configuring the [security context](https://kubernetes.io/docs/tasks/configure-pod-container/security-context/) for your workflow pod. 
diff --git a/docs/workflow-rbac.md b/docs/workflow-rbac.md index 672c24d15146..0b2d974982cb 100644 --- a/docs/workflow-rbac.md +++ b/docs/workflow-rbac.md @@ -21,7 +21,7 @@ rules: - apiGroups: - argoproj.io resources: - - workflowtaskresult + - workflowtaskresults verbs: - create - patch @@ -44,7 +44,7 @@ rules: - patch ``` -Warning: For many organisations, it may not be acceptable to give a workflow the `pod patch` permission, see [#3961](https://github.com/argoproj/argo-workflows/issues/3961) +Warning: For many organizations, it may not be acceptable to give a workflow the `pod patch` permission, see [#3961](https://github.com/argoproj/argo-workflows/issues/3961) If you are not using the emissary, you'll need additional permissions. See [executor](https://github.com/argoproj/argo-workflows/tree/master/manifests/quick-start/base/executor) for suitable diff --git a/docs/workflow-restrictions.md b/docs/workflow-restrictions.md index dfce2c302ba6..7b7c03a5aa7d 100644 --- a/docs/workflow-restrictions.md +++ b/docs/workflow-restrictions.md @@ -1,17 +1,15 @@ # Workflow Restrictions -![GA](assets/ga.svg) - > v2.9 and after ## Introduction -As the administrator of the controller, you may want to limit which types of Workflows your users can run. Setting workflow restrictions allows you to ensure that Workflows comply with certain requirements. +As the administrator of the controller, you may want to limit which types of Workflows your users can run. Setting workflow restrictions allows you to ensure that Workflows comply with certain requirements. ## Available Restrictions -* `templateReferencing: Strict`: Only Workflows using "workflowTemplateRef" will be processed. This allows the administrator of the controller to set a "library" of templates that may be run by its operator, limiting arbitrary Workflow execution. 
-* `templateReferencing: Secure`: Only Workflows using "workflowTemplateRef" will be processed and the controller will enforce that the WorkflowTemplate that is referenced hasn't changed between operations. If you want to make sure the operator of the Workflow cannot run an arbitrary Workflow, use this option. +* `templateReferencing: Strict`: Only Workflows using `workflowTemplateRef` will be processed. This allows the administrator of the controller to set a "library" of templates that may be run by its operator, limiting arbitrary Workflow execution. +* `templateReferencing: Secure`: Only Workflows using `workflowTemplateRef` will be processed and the controller will enforce that the workflow template that is referenced hasn't changed between operations. If you want to make sure the operator of the Workflow cannot run an arbitrary Workflow, use this option. ## Setting Workflow Restrictions diff --git a/docs/workflow-submitting-workflow.md b/docs/workflow-submitting-workflow.md index 7f3f27365565..cec0bb731501 100644 --- a/docs/workflow-submitting-workflow.md +++ b/docs/workflow-submitting-workflow.md @@ -1,7 +1,5 @@ # One Workflow Submitting Another -![GA](assets/ga.svg) - > v2.8 and after If you want one workflow to create another, you can do this using `curl`. You'll need an [access token](access-token.md). Typically the best way is to submit from a workflow template: @@ -29,10 +27,3 @@ spec: -H "Authorization: Bearer eyJhbGci..." 
\ -d '{"resourceKind": "WorkflowTemplate", "resourceName": "wait", "submitOptions": {"labels": "workflows.argoproj.io/workflow-template=wait"}}' ``` - -See also: - -* [access token](access-token.md) -* [resuming a workflow via automation](resuming-workflow-via-automation.md) -* [submitting a workflow via automation](submit-workflow-via-automation.md) -* [async pattern](async-pattern.md) diff --git a/docs/workflow-templates.md b/docs/workflow-templates.md index bfcc41003aab..8dc4d6822132 100644 --- a/docs/workflow-templates.md +++ b/docs/workflow-templates.md @@ -1,7 +1,5 @@ # Workflow Templates -![GA](assets/ga.svg) - > v2.4 and after ## Introduction @@ -19,8 +17,9 @@ in the past. However, a quick description should clarify each and their differen `Workflow`, you must define at least one (but usually more than one) `template` to run. This `template` can be of type `container`, `script`, `dag`, `steps`, `resource`, or `suspend` and can be referenced by an `entrypoint` or by other `dag`, and `step` templates. - + Here is an example of a `Workflow` with two `templates`: + ```yaml apiVersion: argoproj.io/v1alpha1 kind: Workflow @@ -130,8 +129,9 @@ spec: When working with parameters in a `WorkflowTemplate`, please note the following: -1. When working with global parameters, you can instantiate your global variables in your `Workflow` +- When working with global parameters, you can instantiate your global variables in your `Workflow` and then directly reference them in your `WorkflowTemplate`. Below is a working example: + ```yaml apiVersion: argoproj.io/v1alpha1 kind: WorkflowTemplate @@ -165,8 +165,10 @@ spec: name: hello-world-template-global-arg template: hello-world ``` -2. When working with local parameters, the values of local parameters must be supplied at the template definition inside -the `WorkflowTemplate`. 
Below is a working example: + +- When working with local parameters, the values of local parameters must be supplied at the template definition inside +the `WorkflowTemplate`. Below is a working example: + ```yaml apiVersion: argoproj.io/v1alpha1 kind: WorkflowTemplate @@ -205,6 +207,7 @@ You can reference `templates` from another `WorkflowTemplates` (see the [differe Just as how you reference other `templates` within the same `Workflow`, you should do so from a `steps` or `dag` template. Here is an example from a `steps` template: + ```yaml apiVersion: argoproj.io/v1alpha1 kind: Workflow @@ -226,6 +229,7 @@ spec: ``` You can also do so similarly with a `dag` template: + ```yaml apiVersion: argoproj.io/v1alpha1 kind: Workflow @@ -252,6 +256,7 @@ This includes both using `template` and `templateRef`. This behavior is deprecated, no longer supported, and will be removed in a future version. Here is an example of a **deprecated** reference that **should not be used**: + ```yaml apiVersion: argoproj.io/v1alpha1 kind: Workflow @@ -278,9 +283,12 @@ to pass in "live" arguments and reference other templates (those other templates This behavior has been problematic and dangerous. It causes confusion and has design inconsistencies. > 2.9 and after -#### Create `Workflow` from `WorkflowTemplate` Spec -You can create `Workflow` from `WorkflowTemplate` spec using `workflowTemplateRef`. If you pass the arguments to created `Workflow`, it will be merged with WorkflowTemplate arguments. + +### Create `Workflow` from `WorkflowTemplate` Spec + +You can create `Workflow` from `WorkflowTemplate` spec using `workflowTemplateRef`. If you pass the arguments to created `Workflow`, it will be merged with workflow template arguments. 
Here is an example for referring `WorkflowTemplate` as Workflow with passing `entrypoint` and `Workflow Arguments` to `WorkflowTemplate` + ```yaml apiVersion: argoproj.io/v1alpha1 kind: Workflow @@ -297,6 +305,7 @@ spec: ``` Here is an example of a referring `WorkflowTemplate` as Workflow and using `WorkflowTemplates`'s `entrypoint` and `Workflow Arguments` + ```yaml apiVersion: argoproj.io/v1alpha1 kind: Workflow @@ -314,13 +323,13 @@ spec: You can create some example templates as follows: -``` +```bash argo template create https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/workflow-template/templates.yaml ``` Then submit a workflow using one of those templates: -``` +```bash argo submit https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/workflow-template/hello-world.yaml ``` @@ -328,18 +337,16 @@ argo submit https://raw.githubusercontent.com/argoproj/argo-workflows/master/exa Then submit a `WorkflowTemplate` as a `Workflow`: -```sh +```bash argo submit --from workflowtemplate/workflow-template-submittable ``` If you need to submit a `WorkflowTemplate` as a `Workflow` with parameters: -```sh +```bash argo submit --from workflowtemplate/workflow-template-submittable -p param1=value1 ``` - - ### `kubectl` Using `kubectl apply -f` and `kubectl get wftmpl` diff --git a/errors/errors.go b/errors/errors.go index 2b146e8c2743..957f4b1319fc 100644 --- a/errors/errors.go +++ b/errors/errors.go @@ -4,6 +4,7 @@ import ( "encoding/json" "errors" "fmt" + "net/http" ) // Externally visible error codes @@ -22,6 +23,7 @@ const ( type ArgoError interface { Error() string Code() string + HTTPCode() int JSON() []byte } @@ -138,6 +140,25 @@ func (e argoerr) JSON() []byte { return j } +func (e argoerr) HTTPCode() int { + switch e.Code() { + case CodeUnauthorized: + return http.StatusUnauthorized + case CodeForbidden: + return http.StatusForbidden + case CodeNotFound: + return http.StatusNotFound + case CodeBadRequest: + return 
http.StatusBadRequest + case CodeNotImplemented: + return http.StatusNotImplemented + case CodeTimeout, CodeInternal: + return http.StatusInternalServerError + default: + return http.StatusInternalServerError + } +} + // IsCode is a helper to determine if the error is of a specific code func IsCode(code string, err error) bool { if argoErr, ok := err.(argoerr); ok { diff --git a/examples/README.md b/examples/README.md index 6a8dbaf5032b..8cb416a8781b 100644 --- a/examples/README.md +++ b/examples/README.md @@ -1,1563 +1,3 @@ # Documentation by Example -## Welcome! - -Argo is an open source project that provides container-native workflows for Kubernetes. Each step in an Argo workflow is defined as a container. - -Argo is implemented as a Kubernetes CRD (Custom Resource Definition). As a result, Argo workflows can be managed using `kubectl` and natively integrates with other Kubernetes services such as volumes, secrets, and RBAC. The new Argo software is light-weight and installs in under a minute, and provides complete workflow features including parameter substitution, artifacts, fixtures, loops and recursive workflows. - -Many of the Argo examples used in this walkthrough are available in the [`/examples` directory](https://github.com/argoproj/argo-workflows/tree/master/examples) on GitHub. If you like this project, please give us a star! - -For a complete description of the Argo workflow spec, please refer to [the spec documentation](https://argoproj.github.io/argo-workflows/fields/#workflowspec). - -## Table of Contents - -1. [Argo CLI](#argo-cli) -1. [Hello World!](#hello-world) -1. [Parameters](#parameters) -1. [Steps](#steps) -1. [DAG](#dag) -1. [Artifacts](#artifacts) -1. [The Structure of Workflow Specs](#the-structure-of-workflow-specs) -1. [Secrets](#secrets) -1. [Scripts & Results](#scripts--results) -1. [Output Parameters](#output-parameters) -1. [Loops](#loops) -1. [Conditionals](#conditionals) -1. 
[Retrying Failed or Errored Steps](#retrying-failed-or-errored-steps) -1. [Recursion](#recursion) -1. [Exit Handlers](#exit-handlers) -1. [Timeouts](#timeouts) -1. [Volumes](#volumes) -1. [Suspending](#suspending) -1. [Daemon Containers](#daemon-containers) -1. [Sidecars](#sidecars) -1. [Hardwired Artifacts](#hardwired-artifacts) -1. [Kubernetes Resources](#kubernetes-resources) -1. [Docker-in-Docker Using Sidecars](#docker-in-docker-using-sidecars) -1. [Custom Template Variable Reference](#custom-template-variable-reference) -1. [Continuous Integration Example](#continuous-integration-example) - -## Argo CLI - -In case you want to follow along with this walkthrough, here's a quick overview of the most useful argo command line interface (CLI) commands. - -```sh -argo submit hello-world.yaml # submit a workflow spec to Kubernetes -argo list # list current workflows -argo get hello-world-xxx # get info about a specific workflow -argo logs hello-world-xxx # print the logs from a workflow -argo delete hello-world-xxx # delete workflow -``` - -You can also run workflow specs directly using `kubectl` but the Argo CLI provides syntax checking, nicer output, and requires less typing. - -```sh -kubectl create -f hello-world.yaml -kubectl get wf -kubectl get wf hello-world-xxx -kubectl get po --selector=workflows.argoproj.io/workflow=hello-world-xxx --show-all # similar to argo -kubectl logs hello-world-xxx-yyy -c main -kubectl delete wf hello-world-xxx -``` - -## Hello World! - -Let's start by creating a very simple workflow template to echo "hello world" using the docker/whalesay container image from DockerHub. - -You can run this directly from your shell with a simple docker command: - -```sh -$ docker run docker/whalesay cowsay "hello world" - _____________ -< hello world > - ------------- - \ - \ - \ - ## . - ## ## ## == - ## ## ## ## === - /""""""""""""""""___/ === - ~~~ {~~ ~~~~ ~~~ ~~~~ ~~ ~ / ===- ~~~ - \______ o __/ - \ \ __/ - \____\______/ - - -Hello from Docker! 
-This message shows that your installation appears to be working correctly. -``` - -Below, we run the same container on a Kubernetes cluster using an Argo workflow template. -Be sure to read the comments as they provide useful explanations. - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow # new type of k8s spec -metadata: - generateName: hello-world- # name of the workflow spec -spec: - entrypoint: whalesay # invoke the whalesay template - templates: - - name: whalesay # name of the template - container: - image: docker/whalesay - command: [cowsay] - args: ["hello world"] - resources: # limit the resources - limits: - memory: 32Mi - cpu: 100m -``` - -Argo adds a new `kind` of Kubernetes spec called a `Workflow`. The above spec contains a single `template` called `whalesay` which runs the `docker/whalesay` container and invokes `cowsay "hello world"`. The `whalesay` template is the `entrypoint` for the spec. The entrypoint specifies the initial template that should be invoked when the workflow spec is executed by Kubernetes. Being able to specify the entrypoint is more useful when there is more than one template defined in the Kubernetes workflow spec. :-) - -## Parameters - -Let's look at a slightly more complex workflow spec with parameters. - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: hello-world-parameters- -spec: - # invoke the whalesay template with - # "hello world" as the argument - # to the message parameter - entrypoint: whalesay - arguments: - parameters: - - name: message - value: hello world - - templates: - - name: whalesay - inputs: - parameters: - - name: message # parameter declaration - container: - # run cowsay with that message input parameter as args - image: docker/whalesay - command: [cowsay] - args: ["{{inputs.parameters.message}}"] -``` - -This time, the `whalesay` template takes an input parameter named `message` that is passed as the `args` to the `cowsay` command. 
In order to reference parameters (e.g., ``"{{inputs.parameters.message}}"``), the parameters must be enclosed in double quotes to escape the curly braces in YAML. - -The argo CLI provides a convenient way to override parameters used to invoke the entrypoint. For example, the following command would bind the `message` parameter to "goodbye world" instead of the default "hello world". - -```sh -argo submit arguments-parameters.yaml -p message="goodbye world" -``` - -In case of multiple parameters that can be overriten, the argo CLI provides a command to load parameters files in YAML or JSON format. Here is an example of that kind of parameter file: - -```yaml -message: goodbye world -``` - -To run use following command: - -```sh -argo submit arguments-parameters.yaml --parameter-file params.yaml -``` - -Command-line parameters can also be used to override the default entrypoint and invoke any template in the workflow spec. For example, if you add a new version of the `whalesay` template called `whalesay-caps` but you don't want to change the default entrypoint, you can invoke this from the command line as follows: - -```sh -argo submit arguments-parameters.yaml --entrypoint whalesay-caps -``` - -By using a combination of the `--entrypoint` and `-p` parameters, you can call any template in the workflow spec with any parameter that you like. - -The values set in the `spec.arguments.parameters` are globally scoped and can be accessed via `{{workflow.parameters.parameter_name}}`. This can be useful to pass information to multiple steps in a workflow. 
For example, if you wanted to run your workflows with different logging levels that are set in the environment of each container, you could have a YAML file similar to this one: - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: global-parameters- -spec: - entrypoint: A - arguments: - parameters: - - name: log-level - value: INFO - - templates: - - name: A - container: - image: containerA - env: - - name: LOG_LEVEL - value: "{{workflow.parameters.log-level}}" - command: [runA] - - name: B - container: - image: containerB - env: - - name: LOG_LEVEL - value: "{{workflow.parameters.log-level}}" - command: [runB] -``` - -In this workflow, both steps `A` and `B` would have the same log-level set to `INFO` and can easily be changed between workflow submissions using the `-p` flag. - -## Steps - -In this example, we'll see how to create multi-step workflows, how to define more than one template in a workflow spec, and how to create nested workflows. Be sure to read the comments as they provide useful explanations. 
- -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: steps- -spec: - entrypoint: hello-hello-hello - - # This spec contains two templates: hello-hello-hello and whalesay - templates: - - name: hello-hello-hello - # Instead of just running a container - # This template has a sequence of steps - steps: - - - name: hello1 # hello1 is run before the following steps - template: whalesay - arguments: - parameters: - - name: message - value: "hello1" - - - name: hello2a # double dash => run after previous step - template: whalesay - arguments: - parameters: - - name: message - value: "hello2a" - - name: hello2b # single dash => run in parallel with previous step - template: whalesay - arguments: - parameters: - - name: message - value: "hello2b" - - # This is the same template as from the previous example - - name: whalesay - inputs: - parameters: - - name: message - container: - image: docker/whalesay - command: [cowsay] - args: ["{{inputs.parameters.message}}"] -``` - -The above workflow spec prints three different flavors of "hello". The `hello-hello-hello` template consists of three `steps`. The first step named `hello1` will be run in sequence whereas the next two steps named `hello2a` and `hello2b` will be run in parallel with each other. Using the argo CLI command, we can graphically display the execution history of this workflow spec, which shows that the steps named `hello2a` and `hello2b` ran in parallel with each other. - -```sh -STEP TEMPLATE PODNAME DURATION MESSAGE - ✔ steps-z2zdn hello-hello-hello - ├───✔ hello1 whalesay steps-z2zdn-27420706 2s - └─┬─✔ hello2a whalesay steps-z2zdn-2006760091 3s - └─✔ hello2b whalesay steps-z2zdn-2023537710 3s -``` - -## DAG - -As an alternative to specifying sequences of steps, you can define the workflow as a directed-acyclic graph (DAG) by specifying the dependencies of each task. This can be simpler to maintain for complex workflows and allows for maximum parallelism when running tasks. 
- -In the following workflow, step `A` runs first, as it has no dependencies. Once `A` has finished, steps `B` and `C` run in parallel. Finally, once `B` and `C` have completed, step `D` can run. - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: dag-diamond- -spec: - entrypoint: diamond - templates: - - name: echo - inputs: - parameters: - - name: message - container: - image: alpine:3.7 - command: [echo, "{{inputs.parameters.message}}"] - - name: diamond - dag: - tasks: - - name: A - template: echo - arguments: - parameters: [{name: message, value: A}] - - name: B - dependencies: [A] - template: echo - arguments: - parameters: [{name: message, value: B}] - - name: C - dependencies: [A] - template: echo - arguments: - parameters: [{name: message, value: C}] - - name: D - dependencies: [B, C] - template: echo - arguments: - parameters: [{name: message, value: D}] -``` - -The dependency graph may have [multiple roots](./dag-multiroot.yaml). The templates called from a DAG or steps template can themselves be DAG or steps templates. This can allow for complex workflows to be split into manageable pieces. - -The DAG logic has a built-in `fail fast` feature to stop scheduling new steps, as soon as it detects that one of the DAG nodes has failed. Then it waits until all DAG nodes are completed before failing the DAG itself. -The [FailFast](./dag-disable-failFast.yaml) flag defaults to `true`; if set to `false`, it will allow a DAG to run all branches of the DAG to completion (either success or failure), regardless of the failed outcomes of branches in the DAG. More info and an example of this feature can be found [here](https://github.com/argoproj/argo-workflows/issues/1442). - -## Artifacts - -**Note:** -You will need to configure an artifact repository to run this example. -[Configuring an artifact repository here](https://argoproj.github.io/argo-workflows/configure-artifact-repository/). 
- -When running workflows, it is very common to have steps that generate or consume artifacts. Often, the output artifacts of one step may be used as input artifacts to a subsequent step. - -The below workflow spec consists of two steps that run in sequence. The first step named `generate-artifact` will generate an artifact using the `whalesay` template that will be consumed by the second step named `print-message` that then consumes the generated artifact. - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: artifact-passing- -spec: - entrypoint: artifact-example - templates: - - name: artifact-example - steps: - - - name: generate-artifact - template: whalesay - - - name: consume-artifact - template: print-message - arguments: - artifacts: - # bind message to the hello-art artifact - # generated by the generate-artifact step - - name: message - from: "{{steps.generate-artifact.outputs.artifacts.hello-art}}" - - - name: whalesay - container: - image: docker/whalesay:latest - command: [sh, -c] - args: ["cowsay hello world | tee /tmp/hello_world.txt"] - outputs: - artifacts: - # generate hello-art artifact from /tmp/hello_world.txt - # artifacts can be directories as well as files - - name: hello-art - path: /tmp/hello_world.txt - - - name: print-message - inputs: - artifacts: - # unpack the message input artifact - # and put it at /tmp/message - - name: message - path: /tmp/message - container: - image: alpine:latest - command: [sh, -c] - args: ["cat /tmp/message"] -``` - -The `whalesay` template uses the `cowsay` command to generate a file named `/tmp/hello_world.txt`. It then `outputs` this file as an artifact named `hello-art`. In general, the artifact's `path` may be a directory rather than just a file. The `print-message` template takes an input artifact named `message`, unpacks it at the `path` named `/tmp/message` and then prints the contents of `/tmp/message` using the `cat` command. 
-The `artifact-example` template passes the `hello-art` artifact generated as an output of the `generate-artifact` step as the `message` input artifact to the `print-message` step. DAG templates use the tasks prefix to refer to another task, for example `{{tasks.generate-artifact.outputs.artifacts.hello-art}}`. - -Artifacts are packaged as Tarballs and gzipped by default. You may customize this behavior by specifying an archive strategy, using the `archive` field. For example: - -```yaml -<... snipped ...> - outputs: - artifacts: - # default behavior - tar+gzip default compression. - - name: hello-art-1 - path: /tmp/hello_world.txt - - # disable archiving entirely - upload the file / directory as is. - # this is useful when the container layout matches the desired target repository layout. - - name: hello-art-2 - path: /tmp/hello_world.txt - archive: - none: {} - - # customize the compression behavior (disabling it here). - # this is useful for files with varying compression benefits, - # e.g. disabling compression for a cached build workspace and large binaries, - # or increasing compression for "perfect" textual data - like a json/xml export of a large database. - - name: hello-art-3 - path: /tmp/hello_world.txt - archive: - tar: - # no compression (also accepts the standard gzip 1 to 9 values) - compressionLevel: 0 -<... 
 snipped ...> -``` - -## The Structure of Workflow Specs - -We now know enough about the basic components of a workflow spec to review its basic structure: - -- Kubernetes header including metadata -- Spec body - - Entrypoint invocation with optional arguments - - List of template definitions - -- For each template definition - - Name of the template - - Optionally a list of inputs - - Optionally a list of outputs - - Container invocation (leaf template) or a list of steps - - For each step, a template invocation - -To summarize, workflow specs are composed of a set of Argo templates where each template consists of an optional input section, an optional output section and either a container invocation or a list of steps where each step invokes another template. - -Note that the container section of the workflow spec will accept the same options as the container section of a pod spec, including but not limited to environment variables, secrets, and volume mounts. Similarly, for volume claims and volumes. - -## Secrets - -Argo supports the same secrets syntax and mechanisms as Kubernetes Pod specs, which allows access to secrets as environment variables or volume mounts. See the [Kubernetes documentation](https://kubernetes.io/docs/concepts/configuration/secret/) for more information. - -```yaml -# To run this example, first create the secret by running: -# kubectl create secret generic my-secret --from-literal=mypassword=S00perS3cretPa55word -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: secret-example- -spec: - entrypoint: whalesay - # To access secrets as files, add a volume entry in spec.volumes[] and - # then in the container template spec, add a mount using volumeMounts. 
- volumes: - - name: my-secret-vol - secret: - secretName: my-secret # name of an existing k8s secret - templates: - - name: whalesay - container: - image: alpine:3.7 - command: [sh, -c] - args: [' - echo "secret from env: $MYSECRETPASSWORD"; - echo "secret from file: `cat /secret/mountpath/mypassword`" - '] - # To access secrets as environment variables, use the k8s valueFrom and - # secretKeyRef constructs. - env: - - name: MYSECRETPASSWORD # name of env var - valueFrom: - secretKeyRef: - name: my-secret # name of an existing k8s secret - key: mypassword # 'key' subcomponent of the secret - volumeMounts: - - name: my-secret-vol # mount file containing secret at /secret/mountpath - mountPath: "/secret/mountpath" -``` - -## Scripts & Results - -Often, we just want a template that executes a script specified as a here-script (also known as a `here document`) in the workflow spec. This example shows how to do that: - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: scripts-bash- -spec: - entrypoint: bash-script-example - templates: - - name: bash-script-example - steps: - - - name: generate - template: gen-random-int-bash - - - name: print - template: print-message - arguments: - parameters: - - name: message - value: "{{steps.generate.outputs.result}}" # The result of the here-script - - - name: gen-random-int-bash - script: - image: debian:9.4 - command: [bash] - source: | # Contents of the here-script - cat /dev/urandom | od -N2 -An -i | awk -v f=1 -v r=100 '{printf "%i\n", f + r * $1 / 65536}' - - - name: gen-random-int-python - script: - image: python:alpine3.6 - command: [python] - source: | - import random - i = random.randint(1, 100) - print(i) - - - name: gen-random-int-javascript - script: - image: node:9.1-alpine - command: [node] - source: | - var rand = Math.floor(Math.random() * 100); - console.log(rand); - - - name: print-message - inputs: - parameters: - - name: message - container: - image: alpine:latest - 
command: [sh, -c] - args: ["echo result was: {{inputs.parameters.message}}"] -``` - -The `script` keyword allows the specification of the script body using the `source` tag. This creates a temporary file containing the script body and then passes the name of the temporary file as the final parameter to `command`, which should be an interpreter that executes the script body. - -The use of the `script` feature also assigns the standard output of running the script to a special output parameter named `result`. This allows you to use the result of running the script itself in the rest of the workflow spec. In this example, the result is simply echoed by the print-message template. - -## Output Parameters - -Output parameters provide a general mechanism to use the result of a step as a parameter rather than as an artifact. This allows you to use the result from any type of step, not just a `script`, for conditional tests, loops, and arguments. Output parameters work similarly to `script result` except that the value of the output parameter is set to the contents of a generated file rather than the contents of `stdout`. 
- -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: output-parameter- -spec: - entrypoint: output-parameter - templates: - - name: output-parameter - steps: - - - name: generate-parameter - template: whalesay - - - name: consume-parameter - template: print-message - arguments: - parameters: - # Pass the hello-param output from the generate-parameter step as the message input to print-message - - name: message - value: "{{steps.generate-parameter.outputs.parameters.hello-param}}" - - - name: whalesay - container: - image: docker/whalesay:latest - command: [sh, -c] - args: ["echo -n hello world > /tmp/hello_world.txt"] # generate the content of hello_world.txt - outputs: - parameters: - - name: hello-param # name of output parameter - valueFrom: - path: /tmp/hello_world.txt # set the value of hello-param to the contents of this file (hello_world.txt) - - - name: print-message - inputs: - parameters: - - name: message - container: - image: docker/whalesay:latest - command: [cowsay] - args: ["{{inputs.parameters.message}}"] -``` - -DAG templates use the tasks prefix to refer to another task, for example `{{tasks.generate-parameter.outputs.parameters.hello-param}}`. 
- -## Loops - -When writing workflows, it is often very useful to be able to iterate over a set of inputs as shown in this example: - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: loops- -spec: - entrypoint: loop-example - templates: - - name: loop-example - steps: - - - name: print-message - template: whalesay - arguments: - parameters: - - name: message - value: "{{item}}" - withItems: # invoke whalesay once for each item in parallel - - hello world # item 1 - - goodbye world # item 2 - - - name: whalesay - inputs: - parameters: - - name: message - container: - image: docker/whalesay:latest - command: [cowsay] - args: ["{{inputs.parameters.message}}"] -``` - -We can also iterate over sets of items: - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: loops-maps- -spec: - entrypoint: loop-map-example - templates: - - name: loop-map-example - steps: - - - name: test-linux - template: cat-os-release - arguments: - parameters: - - name: image - value: "{{item.image}}" - - name: tag - value: "{{item.tag}}" - withItems: - - { image: 'debian', tag: '9.1' } #item set 1 - - { image: 'debian', tag: '8.9' } #item set 2 - - { image: 'alpine', tag: '3.6' } #item set 3 - - { image: 'ubuntu', tag: '17.10' } #item set 4 - - - name: cat-os-release - inputs: - parameters: - - name: image - - name: tag - container: - image: "{{inputs.parameters.image}}:{{inputs.parameters.tag}}" - command: [cat] - args: [/etc/os-release] -``` - -We can pass lists of items as parameters: - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: loops-param-arg- -spec: - entrypoint: loop-param-arg-example - arguments: - parameters: - - name: os-list # a list of items - value: | - [ - { "image": "debian", "tag": "9.1" }, - { "image": "debian", "tag": "8.9" }, - { "image": "alpine", "tag": "3.6" }, - { "image": "ubuntu", "tag": "17.10" } - ] - - templates: - - name: loop-param-arg-example - inputs: 
- parameters: - - name: os-list - steps: - - - name: test-linux - template: cat-os-release - arguments: - parameters: - - name: image - value: "{{item.image}}" - - name: tag - value: "{{item.tag}}" - withParam: "{{inputs.parameters.os-list}}" # parameter specifies the list to iterate over - - # This template is the same as in the previous example - - name: cat-os-release - inputs: - parameters: - - name: image - - name: tag - container: - image: "{{inputs.parameters.image}}:{{inputs.parameters.tag}}" - command: [cat] - args: [/etc/os-release] -``` - -We can even dynamically generate the list of items to iterate over! - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: loops-param-result- -spec: - entrypoint: loop-param-result-example - templates: - - name: loop-param-result-example - steps: - - - name: generate - template: gen-number-list - # Iterate over the list of numbers generated by the generate step above - - - name: sleep - template: sleep-n-sec - arguments: - parameters: - - name: seconds - value: "{{item}}" - withParam: "{{steps.generate.outputs.result}}" - - # Generate a list of numbers in JSON format - - name: gen-number-list - script: - image: python:alpine3.6 - command: [python] - source: | - import json - import sys - json.dump([i for i in range(20, 31)], sys.stdout) - - - name: sleep-n-sec - inputs: - parameters: - - name: seconds - container: - image: alpine:latest - command: [sh, -c] - args: ["echo sleeping for {{inputs.parameters.seconds}} seconds; sleep {{inputs.parameters.seconds}}; echo done"] -``` - -## Conditionals - -We also support conditional execution. The syntax is implemented by [govaluate](https://github.com/Knetic/govaluate) which offers the support for complex syntax. 
See in the example: - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: coinflip- -spec: - entrypoint: coinflip - templates: - - name: coinflip - steps: - # flip a coin - - - name: flip-coin - template: flip-coin - # evaluate the result in parallel - - - name: heads - template: heads # call heads template if "heads" - when: "{{steps.flip-coin.outputs.result}} == heads" - - name: tails - template: tails # call tails template if "tails" - when: "{{steps.flip-coin.outputs.result}} == tails" - - - name: flip-again - template: flip-coin - - - name: complex-condition - template: heads-tails-or-twice-tails - # call heads template if first flip was "heads" and second was "tails" OR both were "tails" - when: >- - ( {{steps.flip-coin.outputs.result}} == heads && - {{steps.flip-again.outputs.result}} == tails - ) || - ( {{steps.flip-coin.outputs.result}} == tails && - {{steps.flip-again.outputs.result}} == tails ) - - name: heads-regex - template: heads # call heads template if ~ "hea" - when: "{{steps.flip-again.outputs.result}} =~ hea" - - name: tails-regex - template: tails # call heads template if ~ "tai" - when: "{{steps.flip-again.outputs.result}} =~ tai" - - # Return heads or tails based on a random number - - name: flip-coin - script: - image: python:alpine3.6 - command: [python] - source: | - import random - result = "heads" if random.randint(0,1) == 0 else "tails" - print(result) - - - name: heads - container: - image: alpine:3.6 - command: [sh, -c] - args: ["echo \"it was heads\""] - - - name: tails - container: - image: alpine:3.6 - command: [sh, -c] - args: ["echo \"it was tails\""] - - - name: heads-tails-or-twice-tails - container: - image: alpine:3.6 - command: [sh, -c] - args: ["echo \"it was heads the first flip and tails the second. Or it was two times tails.\""] -``` - -!!! note - If the parameter value contains quotes, it may invalidate the govaluate expression. 
To handle parameters with - quotes, embed an [expr](https://github.com/antonmedv/expr) expression in the conditional. For example: - - ```yaml - when: "{{=inputs.parameters['may-contain-quotes'] == 'example'}}" - ``` - -## Retrying Failed or Errored Steps - -You can specify a `retryStrategy` that will dictate how failed or errored steps are retried: - -```yaml -# This example demonstrates the use of retry back offs -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: retry-backoff- -spec: - entrypoint: retry-backoff - templates: - - name: retry-backoff - retryStrategy: - limit: 10 - retryPolicy: "Always" - backoff: - duration: "1" # Must be a string. Default unit is seconds. Could also be a Duration, e.g.: "2m", "6h", "1d" - factor: 2 - maxDuration: "1m" # Must be a string. Default unit is seconds. Could also be a Duration, e.g.: "2m", "6h", "1d" - affinity: - nodeAntiAffinity: {} - container: - image: python:alpine3.6 - command: ["python", -c] - # fail with a 66% probability - args: ["import random; import sys; exit_code = random.choice([0, 1, 1]); sys.exit(exit_code)"] -``` - -* `limit` is the maximum number of times the container will be retried. -* `retryPolicy` specifies if a container will be retried on failure, error, both, or only transient errors (e.g. i/o or TLS handshake timeout). "Always" retries on both errors and failures. Also available: "OnFailure" (default), "OnError", and "OnTransientError" (available after v3.0.0-rc2). -* `backoff` is an exponential backoff -* `nodeAntiAffinity` prevents running steps on the same host. Current implementation allows only empty `nodeAntiAffinity` (i.e. `nodeAntiAffinity: {}`) and by default it uses label `kubernetes.io/hostname` as the selector. - -Providing an empty `retryStrategy` (i.e. `retryStrategy: {}`) will cause a container to retry until completion. - - -## Recursion - -Templates can recursively invoke each other! 
In this variation of the above coin-flip template, we continue to flip coins until it comes up heads. - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: coinflip-recursive- -spec: - entrypoint: coinflip - templates: - - name: coinflip - steps: - # flip a coin - - - name: flip-coin - template: flip-coin - # evaluate the result in parallel - - - name: heads - template: heads # call heads template if "heads" - when: "{{steps.flip-coin.outputs.result}} == heads" - - name: tails # keep flipping coins if "tails" - template: coinflip - when: "{{steps.flip-coin.outputs.result}} == tails" - - - name: flip-coin - script: - image: python:alpine3.6 - command: [python] - source: | - import random - result = "heads" if random.randint(0,1) == 0 else "tails" - print(result) - - - name: heads - container: - image: alpine:3.6 - command: [sh, -c] - args: ["echo \"it was heads\""] -``` - -Here's the result of a couple of runs of coinflip for comparison. - -```sh -argo get coinflip-recursive-tzcb5 - -STEP PODNAME MESSAGE - ✔ coinflip-recursive-vhph5 - ├───✔ flip-coin coinflip-recursive-vhph5-2123890397 - └─┬─✔ heads coinflip-recursive-vhph5-128690560 - └─○ tails - -STEP PODNAME MESSAGE - ✔ coinflip-recursive-tzcb5 - ├───✔ flip-coin coinflip-recursive-tzcb5-322836820 - └─┬─○ heads - └─✔ tails - ├───✔ flip-coin coinflip-recursive-tzcb5-1863890320 - └─┬─○ heads - └─✔ tails - ├───✔ flip-coin coinflip-recursive-tzcb5-1768147140 - └─┬─○ heads - └─✔ tails - ├───✔ flip-coin coinflip-recursive-tzcb5-4080411136 - └─┬─✔ heads coinflip-recursive-tzcb5-4080323273 - └─○ tails -``` - -In the first run, the coin immediately comes up heads and we stop. In the second run, the coin comes up tails three times before it finally comes up heads and we stop. - -## Exit handlers - -An exit handler is a template that *always* executes, irrespective of success or failure, at the end of the workflow. 
- -Some common use cases of exit handlers are: - -- cleaning up after a workflow runs -- sending notifications of workflow status (e.g., e-mail/Slack) -- posting the pass/fail status to a webhook result (e.g. GitHub build result) -- resubmitting or submitting another workflow - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: exit-handlers- -spec: - entrypoint: intentional-fail - onExit: exit-handler # invoke exit-handler template at end of the workflow - templates: - # primary workflow template - - name: intentional-fail - container: - image: alpine:latest - command: [sh, -c] - args: ["echo intentional failure; exit 1"] - - # Exit handler templates - # After the completion of the entrypoint template, the status of the - # workflow is made available in the global variable {{workflow.status}}. - # {{workflow.status}} will be one of: Succeeded, Failed, Error - - name: exit-handler - steps: - - - name: notify - template: send-email - - name: celebrate - template: celebrate - when: "{{workflow.status}} == Succeeded" - - name: cry - template: cry - when: "{{workflow.status}} != Succeeded" - - name: send-email - container: - image: alpine:latest - command: [sh, -c] - args: ["echo send e-mail: {{workflow.name}} {{workflow.status}} {{workflow.duration}}"] - - name: celebrate - container: - image: alpine:latest - command: [sh, -c] - args: ["echo hooray!"] - - name: cry - container: - image: alpine:latest - command: [sh, -c] - args: ["echo boohoo!"] -``` - -## Timeouts - -To limit the elapsed time for a workflow, you can set the variable `activeDeadlineSeconds`. - -```yaml -# To enforce a timeout for a container template, specify a value for activeDeadlineSeconds. 
-apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: timeouts- -spec: - entrypoint: sleep - templates: - - name: sleep - container: - image: alpine:latest - command: [sh, -c] - args: ["echo sleeping for 1m; sleep 60; echo done"] - activeDeadlineSeconds: 10 # terminate container template after 10 seconds -``` - -## Volumes - -The following example dynamically creates a volume and then uses the volume in a two step workflow. - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: volumes-pvc- -spec: - entrypoint: volumes-pvc-example - volumeClaimTemplates: # define volume, same syntax as k8s Pod spec - - metadata: - name: workdir # name of volume claim - spec: - accessModes: [ "ReadWriteOnce" ] - resources: - requests: - storage: 1Gi # Gi => 1024 * 1024 * 1024 - - templates: - - name: volumes-pvc-example - steps: - - - name: generate - template: whalesay - - - name: print - template: print-message - - - name: whalesay - container: - image: docker/whalesay:latest - command: [sh, -c] - args: ["echo generating message in volume; cowsay hello world | tee /mnt/vol/hello_world.txt"] - # Mount workdir volume at /mnt/vol before invoking docker/whalesay - volumeMounts: # same syntax as k8s Pod spec - - name: workdir - mountPath: /mnt/vol - - - name: print-message - container: - image: alpine:latest - command: [sh, -c] - args: ["echo getting message from volume; find /mnt/vol; cat /mnt/vol/hello_world.txt"] - # Mount workdir volume at /mnt/vol before invoking docker/whalesay - volumeMounts: # same syntax as k8s Pod spec - - name: workdir - mountPath: /mnt/vol - -``` - -Volumes are a very useful way to move large amounts of data from one step in a workflow to another. Depending on the system, some volumes may be accessible concurrently from multiple steps. - -In some cases, you want to access an already existing volume rather than creating/destroying one dynamically. 
- -```yaml -# Define Kubernetes PVC -kind: PersistentVolumeClaim -apiVersion: v1 -metadata: - name: my-existing-volume -spec: - accessModes: [ "ReadWriteOnce" ] - resources: - requests: - storage: 1Gi - ---- -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: volumes-existing- -spec: - entrypoint: volumes-existing-example - volumes: - # Pass my-existing-volume as an argument to the volumes-existing-example template - # Same syntax as k8s Pod spec - - name: workdir - persistentVolumeClaim: - claimName: my-existing-volume - - templates: - - name: volumes-existing-example - steps: - - - name: generate - template: whalesay - - - name: print - template: print-message - - - name: whalesay - container: - image: docker/whalesay:latest - command: [sh, -c] - args: ["echo generating message in volume; cowsay hello world | tee /mnt/vol/hello_world.txt"] - volumeMounts: - - name: workdir - mountPath: /mnt/vol - - - name: print-message - container: - image: alpine:latest - command: [sh, -c] - args: ["echo getting message from volume; find /mnt/vol; cat /mnt/vol/hello_world.txt"] - volumeMounts: - - name: workdir - mountPath: /mnt/vol -``` - -It's also possible to declare existing volumes at the template level, instead of the workflow level. -This can be useful for workflows that generate volumes using a `resource` step. - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: template-level-volume- -spec: - entrypoint: generate-and-use-volume - templates: - - name: generate-and-use-volume - steps: - - - name: generate-volume - template: generate-volume - arguments: - parameters: - - name: pvc-size - # In a real-world example, this could be generated by a previous workflow step. 
- value: '1Gi' - - - name: generate - template: whalesay - arguments: - parameters: - - name: pvc-name - value: '{{steps.generate-volume.outputs.parameters.pvc-name}}' - - - name: print - template: print-message - arguments: - parameters: - - name: pvc-name - value: '{{steps.generate-volume.outputs.parameters.pvc-name}}' - - - name: generate-volume - inputs: - parameters: - - name: pvc-size - resource: - action: create - setOwnerReference: true - manifest: | - apiVersion: v1 - kind: PersistentVolumeClaim - metadata: - generateName: pvc-example- - spec: - accessModes: ['ReadWriteOnce', 'ReadOnlyMany'] - resources: - requests: - storage: '{{inputs.parameters.pvc-size}}' - outputs: - parameters: - - name: pvc-name - valueFrom: - jsonPath: '{.metadata.name}' - - - name: whalesay - inputs: - parameters: - - name: pvc-name - volumes: - - name: workdir - persistentVolumeClaim: - claimName: '{{inputs.parameters.pvc-name}}' - container: - image: docker/whalesay:latest - command: [sh, -c] - args: ["echo generating message in volume; cowsay hello world | tee /mnt/vol/hello_world.txt"] - volumeMounts: - - name: workdir - mountPath: /mnt/vol - - - name: print-message - inputs: - parameters: - - name: pvc-name - volumes: - - name: workdir - persistentVolumeClaim: - claimName: '{{inputs.parameters.pvc-name}}' - container: - image: alpine:latest - command: [sh, -c] - args: ["echo getting message from volume; find /mnt/vol; cat /mnt/vol/hello_world.txt"] - volumeMounts: - - name: workdir - mountPath: /mnt/vol - -``` - -## Suspending - -Workflows can be suspended by - -```sh -argo suspend WORKFLOW -``` - -Or by specifying a `suspend` step on the workflow: - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: suspend-template- -spec: - entrypoint: suspend - templates: - - name: suspend - steps: - - - name: build - template: whalesay - - - name: approve - template: approve - - - name: delay - template: delay - - - name: release - template: whalesay - 
- - name: approve - suspend: {} - - - name: delay - suspend: - duration: "20" # Must be a string. Default unit is seconds. Could also be a Duration, e.g.: "2m", "6h", "1d" - - - name: whalesay - container: - image: docker/whalesay - command: [cowsay] - args: ["hello world"] -``` - -Once suspended, a Workflow will not schedule any new steps until it is resumed. It can be resumed manually by -```sh -argo resume WORKFLOW -``` -Or automatically with a `duration` limit as the example above. - -## Daemon Containers - -Argo workflows can start containers that run in the background (also known as `daemon containers`) while the workflow itself continues execution. Note that the daemons will be *automatically destroyed* when the workflow exits the template scope in which the daemon was invoked. Daemon containers are useful for starting up services to be tested or to be used in testing (e.g., fixtures). We also find it very useful when running large simulations to spin up a database as a daemon for collecting and organizing the results. The big advantage of daemons compared with sidecars is that their existence can persist across multiple steps or even the entire workflow. 
- -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: daemon-step- -spec: - entrypoint: daemon-example - templates: - - name: daemon-example - steps: - - - name: influx - template: influxdb # start an influxdb as a daemon (see the influxdb template spec below) - - - - name: init-database # initialize influxdb - template: influxdb-client - arguments: - parameters: - - name: cmd - value: curl -XPOST 'http://{{steps.influx.ip}}:8086/query' --data-urlencode "q=CREATE DATABASE mydb" - - - - name: producer-1 # add entries to influxdb - template: influxdb-client - arguments: - parameters: - - name: cmd - value: for i in $(seq 1 20); do curl -XPOST 'http://{{steps.influx.ip}}:8086/write?db=mydb' -d "cpu,host=server01,region=uswest load=$i" ; sleep .5 ; done - - name: producer-2 # add entries to influxdb - template: influxdb-client - arguments: - parameters: - - name: cmd - value: for i in $(seq 1 20); do curl -XPOST 'http://{{steps.influx.ip}}:8086/write?db=mydb' -d "cpu,host=server02,region=uswest load=$((RANDOM % 100))" ; sleep .5 ; done - - name: producer-3 # add entries to influxdb - template: influxdb-client - arguments: - parameters: - - name: cmd - value: curl -XPOST 'http://{{steps.influx.ip}}:8086/write?db=mydb' -d 'cpu,host=server03,region=useast load=15.4' - - - - name: consumer # consume entries from influxdb - template: influxdb-client - arguments: - parameters: - - name: cmd - value: curl --silent -G http://{{steps.influx.ip}}:8086/query?pretty=true --data-urlencode "db=mydb" --data-urlencode "q=SELECT * FROM cpu" - - - name: influxdb - daemon: true # start influxdb as a daemon - retryStrategy: - limit: 10 # retry container if it fails - container: - image: influxdb:1.2 - readinessProbe: # wait for readinessProbe to succeed - httpGet: - path: /ping - port: 8086 - - - name: influxdb-client - inputs: - parameters: - - name: cmd - container: - image: appropriate/curl:latest - command: ["/bin/sh", "-c"] - args: 
["{{inputs.parameters.cmd}}"] - resources: - requests: - memory: 32Mi - cpu: 100m -``` - -Step templates use the `steps` prefix to refer to another step: for example `{{steps.influx.ip}}`. In DAG templates, the `tasks` prefix is used instead: for example `{{tasks.influx.ip}}`. - -## Sidecars - -A sidecar is another container that executes concurrently in the same pod as the main container and is useful in creating multi-container pods. - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: sidecar-nginx- -spec: - entrypoint: sidecar-nginx-example - templates: - - name: sidecar-nginx-example - container: - image: appropriate/curl - command: [sh, -c] - # Try to read from nginx web server until it comes up - args: ["until `curl -G 'http://127.0.0.1/' >& /tmp/out`; do echo sleep && sleep 1; done && cat /tmp/out"] - # Create a simple nginx web server - sidecars: - - name: nginx - image: nginx:1.13 -``` - -In the above example, we create a sidecar container that runs nginx as a simple web server. The order in which containers come up is random, so in this example the main container polls the nginx container until it is ready to service requests. This is a good design pattern when designing multi-container systems: always wait for any services you need to come up before running your main code. - -## Hardwired Artifacts - -With Argo, you can use any container image that you like to generate any kind of artifact. In practice, however, we find certain types of artifacts are very common, so there is built-in support for git, http, gcs and s3 artifacts. - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: hardwired-artifact- -spec: - entrypoint: hardwired-artifact - templates: - - name: hardwired-artifact - inputs: - artifacts: - # Check out the master branch of the argo repo and place it at /src - # revision can be anything that git checkout accepts: branch, commit, tag, etc. 
- - name: argo-source - path: /src - git: - repo: https://github.com/argoproj/argo-workflows.git - revision: "master" - # Download kubectl 1.8.0 and place it at /bin/kubectl - - name: kubectl - path: /bin/kubectl - mode: 0755 - http: - url: https://storage.googleapis.com/kubernetes-release/release/v1.8.0/bin/linux/amd64/kubectl - # Copy an s3 compatible artifact repository bucket (such as AWS, GCS and Minio) and place it at /s3 - - name: objects - path: /s3 - s3: - endpoint: storage.googleapis.com - bucket: my-bucket-name - key: path/in/bucket - accessKeySecret: - name: my-s3-credentials - key: accessKey - secretKeySecret: - name: my-s3-credentials - key: secretKey - container: - image: debian - command: [sh, -c] - args: ["ls -l /src /bin/kubectl /s3"] -``` - -## Kubernetes Resources - -In many cases, you will want to manage Kubernetes resources from Argo workflows. The resource template allows you to create, delete or updated any type of Kubernetes resource. - -```yaml -# in a workflow. The resource template type accepts any k8s manifest -# (including CRDs) and can perform any kubectl action against it (e.g. create, -# apply, delete, patch). -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: k8s-jobs- -spec: - entrypoint: pi-tmpl - templates: - - name: pi-tmpl - resource: # indicates that this is a resource template - action: create # can be any kubectl action (e.g. create, delete, apply, patch) - # The successCondition and failureCondition are optional expressions. - # If failureCondition is true, the step is considered failed. - # If successCondition is true, the step is considered successful. - # They use kubernetes label selection syntax and can be applied against any field - # of the resource (not just labels). Multiple AND conditions can be represented by comma - # delimited expressions. 
- # For more details: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/ - successCondition: status.succeeded > 0 - failureCondition: status.failed > 3 - manifest: | #put your kubernetes spec here - apiVersion: batch/v1 - kind: Job - metadata: - generateName: pi-job- - spec: - template: - metadata: - name: pi - spec: - containers: - - name: pi - image: perl - command: ["perl", "-Mbignum=bpi", "-wle", "print bpi(2000)"] - restartPolicy: Never - backoffLimit: 4 -``` - -**Note:** -Currently only a single resource can be managed by a resource template so either a `generateName` or `name` must be provided in the resource's metadata. - -Resources created in this way are independent of the workflow. If you want the resource to be deleted when the workflow is deleted then you can use [Kubernetes garbage collection](https://kubernetes.io/docs/concepts/workloads/controllers/garbage-collection/) with the workflow resource as an owner reference ([example](./k8s-owner-reference.yaml)). - -You can also collect data about the resource in output parameters (see more at [k8s-jobs.yaml](./k8s-jobs.yaml)) - -**Note:** -When patching, the resource will accept another attribute, `mergeStrategy`, which can either be `strategic`, `merge`, or `json`. If this attribute is not supplied, it will default to `strategic`. Keep in mind that Custom Resources cannot be patched with `strategic`, so a different strategy must be chosen. 
For example, suppose you have the [CronTab CustomResourceDefinition](https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/#create-a-customresourcedefinition) defined, and the following instance of a CronTab: - -```yaml -apiVersion: "stable.example.com/v1" -kind: CronTab -spec: - cronSpec: "* * * * */5" - image: my-awesome-cron-image -``` - -This Crontab can be modified using the following Argo Workflow: - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: k8s-patch- -spec: - entrypoint: cront-tmpl - templates: - - name: cront-tmpl - resource: - action: patch - mergeStrategy: merge # Must be one of [strategic merge json] - manifest: | - apiVersion: "stable.example.com/v1" - kind: CronTab - spec: - cronSpec: "* * * * */10" - image: my-awesome-cron-image -``` - -## Docker-in-Docker Using Sidecars - -An application of sidecars is to implement Docker-in-Docker (DinD). DinD is useful when you want to run Docker commands from inside a container. For example, you may want to build and push a container image from inside your build container. In the following example, we use the docker:dind container to run a Docker daemon in a sidecar and give the main container access to the daemon. 
- -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: sidecar-dind- -spec: - entrypoint: dind-sidecar-example - templates: - - name: dind-sidecar-example - container: - image: docker:19.03.13 - command: [sh, -c] - args: ["until docker ps; do sleep 3; done; docker run --rm debian:latest cat /etc/os-release"] - env: - - name: DOCKER_HOST # the docker daemon can be access on the standard port on localhost - value: 127.0.0.1 - sidecars: - - name: dind - image: docker:19.03.13-dind # Docker already provides an image for running a Docker daemon - command: [dockerd-entrypoint.sh] - env: - - name: DOCKER_TLS_CERTDIR # Docker TLS env config - value: "" - securityContext: - privileged: true # the Docker daemon can only run in a privileged container - # mirrorVolumeMounts will mount the same volumes specified in the main container - # to the sidecar (including artifacts), at the same mountPaths. This enables - # dind daemon to (partially) see the same filesystem as the main container in - # order to use features such as docker volume binding. - mirrorVolumeMounts: true -``` - -## Custom Template Variable Reference - -In this example, we can see how we can use the other template language variable reference (E.g: Jinja) in Argo workflow template. 
-Argo will validate and resolve only the variable that starts with Argo allowed prefix -{***"item", "steps", "inputs", "outputs", "workflow", "tasks"***} - -```yaml -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: custom-template-variable- -spec: - entrypoint: hello-hello-hello - - templates: - - name: hello-hello-hello - steps: - - - name: hello1 - template: whalesay - arguments: - parameters: [{name: message, value: "hello1"}] - - - name: hello2a - template: whalesay - arguments: - parameters: [{name: message, value: "hello2a"}] - - name: hello2b - template: whalesay - arguments: - parameters: [{name: message, value: "hello2b"}] - - - name: whalesay - inputs: - parameters: - - name: message - container: - image: docker/whalesay - command: [cowsay] - args: ["{{user.username}}"] - -``` - -## Continuous Integration Example - -Continuous integration is a popular application for workflows. Currently, Argo does not provide event triggers for automatically kicking off your CI jobs, but we plan to do so in the near future. Until then, you can easily write a cron job that checks for new commits and kicks off the needed workflow, or use your existing Jenkins server to kick off the workflow. - -A good example of a CI workflow spec is provided at https://github.com/argoproj/argo-workflows/tree/master/examples/influxdb-ci.yaml. Because it just uses the concepts that we've already covered and is somewhat long, we don't go into details here. +This has been moved to [the docs](https://argoproj.github.io/argo-workflows/walk-through/). \ No newline at end of file diff --git a/examples/artifact-gc-workflow.yaml b/examples/artifact-gc-workflow.yaml new file mode 100644 index 000000000000..72b03788cefa --- /dev/null +++ b/examples/artifact-gc-workflow.yaml @@ -0,0 +1,36 @@ +# This example shows how you can configure Artifact Garbage Collection for your Workflow. 
+# Here there are two artifacts - one is automatically deleted when the Workflow completes, and the other +# is deleted when the Workflow gets deleted. +# In this case, "OnWorkflowDeletion" is defined for all artifacts by default, but the "on-completion" artifact +# overrides that default strategy. +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: artifact-gc- +spec: + entrypoint: main + artifactGC: + strategy: OnWorkflowDeletion # the overall strategy, which can be overridden + templates: + - name: main + container: + image: argoproj/argosay:v2 + command: + - sh + - -c + args: + - | + echo "hello world" > /tmp/on-completion.txt + echo "hello world" > /tmp/on-deletion.txt + outputs: + artifacts: + - name: on-completion + path: /tmp/on-completion.txt + s3: + key: on-completion.txt + artifactGC: + strategy: OnWorkflowCompletion # overriding the default strategy for this artifact + - name: on-deletion + path: /tmp/on-deletion.txt + s3: + key: on-deletion.txt diff --git a/examples/artifacts-workflowtemplate.yaml b/examples/artifacts-workflowtemplate.yaml new file mode 100644 index 000000000000..580d207c82cc --- /dev/null +++ b/examples/artifacts-workflowtemplate.yaml @@ -0,0 +1,135 @@ +apiVersion: argoproj.io/v1alpha1 +kind: WorkflowTemplate +metadata: + name: artifacts + annotations: + workflows.argoproj.io/description: | + This example shows how to produce different types of artifact. 
+spec: + entrypoint: main + templates: + - name: main + volumes: + - name: in + emptyDir: { } + - name: out + emptyDir: { } + inputs: + artifacts: + - name: temps + path: /in/annual.csv + http: + url: https://datahub.io/core/global-temp/r/annual.csv + containerSet: + volumeMounts: + - mountPath: /in + name: in + - mountPath: /out + name: out + containers: + - name: setup + image: argoproj/argosay:v2 + command: + - sh + - -c + args: + - | + mkdir -p /out/assets + - name: gnuplot + image: remuslazar/gnuplot + dependencies: + - setup + args: + - -e + - | + set xlabel 'Year'; set ylabel 'Mean'; + set grid; + set datafile separator ','; + set term png size 600,400; + set output '/out/assets/global-temp.png'; + plot '/in/annual.csv' every 2::0 skip 1 using 2:3 title 'Global Temperature' with lines linewidth 2; + - name: main + image: argoproj/argosay:v2 + dependencies: + - setup + command: + - sh + - -c + args: + - | + cowsay "hello world" > /out/hello.txt + + cat > /out/hello.json < /out/assets/styles.css + + cat > /out/index.html < + + + + +

Global Temperature

+ + + + EOF + + cat > /out/malicious.html < + + +

This page attempts to run a script that shows an alert, but the Argo Server UI Content-Security-Policy will prevent that.

+

To check, open your Web Console and see that "Blocked script execution ... because the document's frame is sandboxed." (or similar) is printed.

+ + + EOF + outputs: + artifacts: + # saving single files + - name: text-file + path: /out/hello.txt + s3: + key: hello.txt + archive: + none: { } + # JSON files are shown with syntax highlighting. + - name: json-file + path: /out/hello.json + s3: + key: hello.json + archive: + none: { } + # CSS in not considered a known file type + - name: css-file + path: /out/assets/styles.css + s3: + key: styles.css + archive: + none: { } + # this artifact tries to run JavaScript + - name: malicious-file + path: /out/malicious.html + s3: + key: malicious.html + archive: + none: { } + # save a whole directory + - name: report + path: /out + s3: + key: report/ + archive: + none: { } + # this will be a tgz with a single file + - name: tgz-file + path: /out/hello.txt + s3: + key: file.tgz + # this will be a tgz with two entries, one dir and one file + - name: tgz-dir + path: /out + s3: + key: dir.tgz \ No newline at end of file diff --git a/examples/ci-workflowtemplate.yaml b/examples/ci-workflowtemplate.yaml new file mode 100644 index 000000000000..fb34af6c699b --- /dev/null +++ b/examples/ci-workflowtemplate.yaml @@ -0,0 +1,195 @@ +apiVersion: argoproj.io/v1alpha1 +kind: WorkflowTemplate +metadata: + name: ci + annotations: + workflows.argoproj.io/description: | + This workflows builds and tests Argo Workflows. + + It demonstrates: + + * Cache restore and store. + * Publishing test reports. 
+spec: + arguments: + parameters: + - name: branch + value: master + + entrypoint: main + onExit: cache-store + + volumeClaimTemplates: + - metadata: + name: work + spec: + accessModes: [ ReadWriteOnce ] + resources: + requests: + storage: 64Mi + + templates: + - name: main + dag: + tasks: + - name: cache-restore + template: cache-restore + - name: clone + template: clone + - name: deps + template: deps + dependencies: + - clone + - cache-restore + - name: build + template: build + dependencies: + - deps + - name: test + template: test + dependencies: + - build + + - name: cache-restore + inputs: + artifacts: + - name: GOMODCACHE + path: /mnt/GOMODCACHE + optional: true + s3: + key: "github.com/golang/examples/{{workflow.parameters.branch}}/GOMODCACHE" + - name: GOCACHE + path: /mnt/GOCACHE + optional: true + s3: + key: "github.com/golang/examples/{{workflow.parameters.branch}}/GOCACHE" + container: + volumeMounts: + - mountPath: /go/pkg/mod + name: work + subPath: mod + - mountPath: /root/.cache/go-build + name: work + subPath: cache + image: golang:1.18 + workingDir: /go/src/github.com/golang/example + command: [ sh, -euxc ] + args: + - | + mkdir -p $(go env GOMODCACHE) + [ -e /mnt/GOMODCACHE ] && cp -Rf /mnt/GOMODCACHE $(go env GOMODCACHE) + mkdir -p $(go env GOCACHE) + [ -e /mnt/GOCACHE ] && cp -Rf /mnt/GOCACHE $(go env GOCACHE) + + - name: cache-store + container: + volumeMounts: + - mountPath: /go/pkg/mod + name: work + subPath: GOMODCACHE + - mountPath: /root/.cache/go-build + name: work + subPath: GOCACHE + image: golang:1.18 + workingDir: /go/src/github.com/golang/example + outputs: + artifacts: + - name: GOMODCACHE + path: /go/pkg/mod + optional: true + s3: + key: "github.com/golang/examples/{{workflow.parameters.branch}}/GOMODCACHE" + - name: GOCACHE + path: /root/.cache/go-build + optional: true + s3: + key: "github.com/golang/examples/{{workflow.parameters.branch}}/GOCACHE" + + - name: clone + container: + volumeMounts: + - mountPath: 
/go/src/github.com/golang/example + name: work + subPath: src + - mountPath: /go/pkg/mod + name: work + subPath: GOMODCACHE + - mountPath: /root/.cache/go-build + name: work + subPath: GOCACHE + image: golang:1.18 + workingDir: /go/src/github.com/golang/example + command: [ sh, -euxc ] + args: + - | + git clone -v -b "{{workflow.parameters.branch}}" --single-branch --depth 1 https://github.com/golang/example.git . + + - name: deps + container: + image: golang:1.18 + volumeMounts: + - mountPath: /go/src/github.com/golang/example + name: work + subPath: src + - mountPath: /go/pkg/mod + name: work + subPath: GOMODCACHE + - mountPath: /root/.cache/go-build + name: work + subPath: GOCACHE + workingDir: /go/src/github.com/golang/example + command: [ sh, -xuce ] + args: + - | + go mod download -x + + - name: build + container: + image: golang:1.18 + volumeMounts: + - mountPath: /go/src/github.com/golang/example + name: work + subPath: src + - mountPath: /go/pkg/mod + name: work + subPath: GOMODCACHE + - mountPath: /root/.cache/go-build + name: work + subPath: GOCACHE + workingDir: /go/src/github.com/golang/example + command: [ sh, -xuce ] + args: + - | + go build ./... + + - name: test + container: + image: golang:1.18 + volumeMounts: + - mountPath: /go/src/github.com/golang/example + name: work + subPath: src + - mountPath: /go/pkg/mod + name: work + subPath: GOMODCACHE + - mountPath: /root/.cache/go-build + name: work + subPath: GOCACHE + workingDir: /go/src/github.com/golang/example + command: [ sh, -euxc ] + args: + - | + go install github.com/jstemmer/go-junit-report@latest + go install github.com/alexec/junit2html@v0.0.2 + + trap 'cat test.out | go-junit-report | junit2html > test-report.html' EXIT + + go test -v ./... 
2>&1 > test.out + outputs: + artifacts: + - name: test-report + path: /go/src/github.com/golang/example/test-report.html + archive: + none: { } + s3: + key: "{{workflow.parameters.branch}}/test-report.html" diff --git a/examples/ci.yaml b/examples/ci.yaml index b8e88927c6a8..50b7a8f4b161 100644 --- a/examples/ci.yaml +++ b/examples/ci.yaml @@ -5,7 +5,7 @@ metadata: spec: # entrypoint is the name of the template used as the starting point of the workflow entrypoint: ci-example - # the 'ci-example' template accepts an parameter 'revision', with a default of 'cfe12d6'. + # the 'ci-example' template accepts a parameter 'revision', with a default of 'cfe12d6'. # this can be overridden via argo CLI (e.g. `argo submit ci.yaml -p revision=0dea2d0`) arguments: parameters: diff --git a/examples/container-set-template/outputs-result-workflow.yaml b/examples/container-set-template/outputs-result-workflow.yaml index db946f8c7d3c..804c2e63ebf3 100644 --- a/examples/container-set-template/outputs-result-workflow.yaml +++ b/examples/container-set-template/outputs-result-workflow.yaml @@ -25,15 +25,17 @@ spec: parameters: - name: x value: "{{tasks.a.outputs.result}}" - dependencies: ["a"] + dependencies: [ "a" ] - name: group containerSet: containers: - name: main image: python:alpine3.6 - args: + command: + - python - -c + args: - | print("hi") @@ -43,5 +45,7 @@ spec: - name: x script: image: python:alpine3.6 + command: + - python source: | assert "{{inputs.parameters.x}}" == "hi" diff --git a/examples/daemon-nginx.yaml b/examples/daemon-nginx.yaml index 1b20e147c5ca..4ad13e03d97b 100644 --- a/examples/daemon-nginx.yaml +++ b/examples/daemon-nginx.yaml @@ -24,7 +24,7 @@ spec: readinessProbe: httpGet: path: / - port: "80" + port: 80 initialDelaySeconds: 2 timeoutSeconds: 1 diff --git a/examples/daemon-step.yaml b/examples/daemon-step.yaml index 1770f7560251..14f9e0c6f869 100644 --- a/examples/daemon-step.yaml +++ b/examples/daemon-step.yaml @@ -50,7 +50,7 @@ spec: readinessProbe: 
httpGet: path: /ping - port: "8086" + port: 8086 initialDelaySeconds: 5 timeoutSeconds: 1 diff --git a/examples/dag-daemon-task.yaml b/examples/dag-daemon-task.yaml index 79fa1e50f125..ba233150ae62 100644 --- a/examples/dag-daemon-task.yaml +++ b/examples/dag-daemon-task.yaml @@ -59,7 +59,7 @@ spec: readinessProbe: httpGet: path: /ping - port: "8086" + port: 8086 initialDelaySeconds: 5 timeoutSeconds: 1 diff --git a/examples/data-transformations.yaml b/examples/data-transformations.yaml index aa529201e778..73e1787029e2 100644 --- a/examples/data-transformations.yaml +++ b/examples/data-transformations.yaml @@ -3,8 +3,6 @@ apiVersion: argoproj.io/v1alpha1 kind: Workflow metadata: generateName: data-transformations- - labels: - workflows.argoproj.io/test: "true" annotations: workflows.argoproj.io/description: | This workflow demonstrates using a data template to list in an S3 bucket diff --git a/examples/influxdb-ci.yaml b/examples/influxdb-ci.yaml index 609438bb43bd..4b6b865d09a7 100644 --- a/examples/influxdb-ci.yaml +++ b/examples/influxdb-ci.yaml @@ -203,7 +203,7 @@ spec: readinessProbe: httpGet: path: /ping - port: "8086" + port: 8086 initialDelaySeconds: 5 timeoutSeconds: 1 command: ["/bin/sh", "-c"] diff --git a/examples/input-artifact-azure.yaml b/examples/input-artifact-azure.yaml new file mode 100644 index 000000000000..0510fe125a3e --- /dev/null +++ b/examples/input-artifact-azure.yaml @@ -0,0 +1,41 @@ +# This example demonstrates the loading of a hard-wired input artifact from an Azure +# Storage store. Azure guards access to the storage account with an access key which +# can be stored as a regular Kubernetes secret, and referenced in the workflow using +# secret selectors. If managed identities have been applied to the pods, then the +# parameter UseSDKCreds can be set to true, and managed identity authentication will +# be used instead of the secret. 
If a secret is used, the required secret can be created +# with the following command: +# $ kubectl create secret generic my-azure-credentials --from-literal=accountKey= +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: input-artifact-azure- +spec: + entrypoint: input-artifact-azure-example + templates: + - name: input-artifact-azure-example + inputs: + artifacts: + - name: my-art + path: /my-artifact + azure: + # For most people, the endpoint is in the form https://.blob.core.windows.net. + # In sovereign clouds, or Azure Stack clouds, the endpoint may be different. + endpoint: https://myazurestorageaccountname.blob.core.windows.net + # The name of the container within the storage account. + container: my-container + # The path (within the container) to the artifact + blob: path/in/container + # accountKeySecret is a secret selector. It references the k8s secret named + # 'my-azure-credentials'. This secret is expected to have the key + # 'accountKey', containing the Azure Storage account name and access key. + accountKeySecret: + name: my-azure-credentials + key: accountKey + # Optional: set UseSDKCreds to true and skip setting accountKeySecret if + # you are using environment variables to configure, or a Managed Identity. + # useSDKCreds: true + container: + image: debian:latest + command: [sh, -c] + args: ["ls -l /my-artifact"] diff --git a/examples/input-artifact-git.yaml b/examples/input-artifact-git.yaml index 09cb61aad53a..f8049b8c2e26 100644 --- a/examples/input-artifact-git.yaml +++ b/examples/input-artifact-git.yaml @@ -47,6 +47,11 @@ spec: # the refspec format. # fetch: refs/meta/* # fetch: refs/changes/* + # + # Single branch mode can be specified by providing a `singleBranch` and `branch`. This mode + # is faster than passing in a revision, as it will only fetch the references to the given branch.
+ # singleBranch: true + # branch: my-branch container: image: golang:1.10 command: [sh, -c] diff --git a/examples/memoize-simple.yaml b/examples/memoize-simple.yaml index 0b28cb78378f..3459adffaed9 100644 --- a/examples/memoize-simple.yaml +++ b/examples/memoize-simple.yaml @@ -1,43 +1,36 @@ # This example demonstrates the ability to memoize a template. # Cache can be configured in configmap. For example, if the cache # ConfigMap looks like the following, then you should expect the node output -# retrieved from the cache to be "changed cache value". +# retrieved from the cache to be "different world". # Note that you also need to update `creationTimestamp` in -# `data.test-5` to be within the `memoize.maxAge` if `maxAge` is specified in the workflow spec. +# `data.hello` to be within the `memoize.maxAge` if `maxAge` is specified in the workflow spec. # # apiVersion: v1 # data: -# test-5: '{"nodeID":"memoized-hdckj","outputs":{"parameters":[{"name":"hello","value":"changed cache value","valueFrom":{"path":"/tmp/hello_world.txt"}}],"exitCode":"0"},"creationTimestamp":"2021-10-12T15:14:04Z","lastHitTimestamp":"2021-10-12T15:14:04Z"}' +# cache-key: '{"nodeID":"memoized-hdckj","outputs":{"parameters":[{"name":"hello","value":"different world","valueFrom":{"path":"/tmp/hello_world.txt"}}],"exitCode":"0"},"creationTimestamp":"2021-10-12T15:14:04Z","lastHitTimestamp":"2021-10-12T15:14:04Z"}' # kind: ConfigMap # metadata: # creationTimestamp: "2021-10-12T15:14:04Z" -# name: my-config +# name: cache-config #--- apiVersion: argoproj.io/v1alpha1 kind: Workflow metadata: - generateName: memoized-simple-workflow- + generateName: memoized- spec: entrypoint: whalesay - arguments: - parameters: - - name: message - value: test-5 templates: - name: whalesay - inputs: - parameters: - - name: message memoize: - key: "{{inputs.parameters.message}}" + key: "cache-key" maxAge: "10s" cache: configMap: - name: my-config + name: cache-config container: image: docker/whalesay:latest command: 
[sh, -c] - args: ["cowsay {{inputs.parameters.message}} > /tmp/hello_world.txt"] + args: ["cowsay world > /tmp/hello_world.txt"] outputs: parameters: - name: hello diff --git a/examples/output-artifact-azure.yaml b/examples/output-artifact-azure.yaml new file mode 100644 index 000000000000..aae6c18cf455 --- /dev/null +++ b/examples/output-artifact-azure.yaml @@ -0,0 +1,36 @@ +# This is an example of a workflow producing an Azure Storage output artifact which is saved +# to a hard-wired location. This is useful for workflows which want to publish results to a well +# known or pre-determined location. +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: output-artifact-azure- +spec: + entrypoint: whalesay + templates: + - name: whalesay + container: + image: docker/whalesay:latest + command: [sh, -c] + args: ["cowsay hello world | tee /tmp/hello_world.txt"] + outputs: + artifacts: + - name: message + path: /tmp + azure: + # For most people, the endpoint is in the form https://.blob.core.windows.net. + # In sovereign clouds, or Azure Stack clouds, the endpoint may be different. + endpoint: https://myazurestorageaccountname.blob.core.windows.net + # The name of the container within the storage account. + container: my-container + # The path (within the container) to the artifact + blob: path/in/container/hello_world.txt.tgz + # accountKeySecret is a secret selector. It references the k8s secret named + # 'my-azure-credentials'. This secret is expected to have the key + # 'accountKey', containing the Azure Blob Storage account name and access key. + accountKeySecret: + name: my-azure-credentials + key: accountKey + # Optional: set UseSDKCreds to true and skip setting accountKeySecret if + # you are using environment variables to configure, or a Managed Identity.
+ # useSDKCreds: true diff --git a/examples/retry-conditional.yaml b/examples/retry-conditional.yaml index b11d3d8be2e8..3c1880a6b25a 100644 --- a/examples/retry-conditional.yaml +++ b/examples/retry-conditional.yaml @@ -1,4 +1,4 @@ -# Only retry if the retryStrategy.when condition is satisfied. In this example, retries will be made until a pod has +# Only retry if the retryStrategy.expression condition is satisfied. In this example, retries will be made until a pod has # exit code 2 or the limit of 10 is reached, whichever happens first. apiVersion: argoproj.io/v1alpha1 kind: Workflow diff --git a/examples/selected-executor-workflow.yaml b/examples/selected-executor-workflow.yaml index 68321e195f2f..dd39bfb83124 100644 --- a/examples/selected-executor-workflow.yaml +++ b/examples/selected-executor-workflow.yaml @@ -3,8 +3,6 @@ kind: Workflow metadata: generateName: selected-executor- labels: - # run this workflow as a part of our test suite - workflows.argoproj.io/test: "true" # use the pns executor, rather than the default (typically emissary) workflows.argoproj.io/container-runtime-executor: pns annotations: @@ -17,7 +15,7 @@ metadata: e.g. have a certain labels use certain executors. 
# this workflow will only run on workflows version v3.0.0 - workflows.argoproj.io/version: ">= 3.0.0" + workflows.argoproj.io/version: ">= 3.0.0 < 3.4.0" spec: entrypoint: main templates: diff --git a/examples/validator.go b/examples/validator.go index b6016405cbc3..42919f3b4b6c 100644 --- a/examples/validator.go +++ b/examples/validator.go @@ -4,6 +4,7 @@ import ( "fmt" "io/ioutil" "os" + "strings" "path/filepath" "github.com/xeipuuv/gojsonschema" @@ -61,12 +62,23 @@ func ValidateArgoYamlRecursively(fromPath string, skipFileNames []string) (map[s return err } + incorrectError := false if !result.Valid() { errorDescriptions := []string{} for _, err := range result.Errors() { - errorDescriptions = append(errorDescriptions, fmt.Sprintf("%s in %s", err.Description(), err.Context().String())) + // port should be port number or port reference string, using string port number will cause issue + // due swagger 2.0 limitation, we can only specify one data type (we use string, same as k8s api swagger) + if strings.HasSuffix(err.Field(), "httpGet.port") && err.Description() == "Invalid type. Expected: string, given: integer" { + incorrectError = true + continue + } else { + errorDescriptions = append(errorDescriptions, fmt.Sprintf("%s in %s", err.Description(), err.Context().String())) + } + } + + if !(incorrectError && len(errorDescriptions) == 1) { + failed[path] = errorDescriptions } - failed[path] = errorDescriptions } return nil }) diff --git a/examples/webhdfs-input-output-artifacts.yaml b/examples/webhdfs-input-output-artifacts.yaml new file mode 100644 index 000000000000..5ed04c3a0550 --- /dev/null +++ b/examples/webhdfs-input-output-artifacts.yaml @@ -0,0 +1,74 @@ +# This example demonstrates the usage of an input or output artifact via a webhdfs endpoint. +# The different providers support different ways of authentication. For Azure datalake, for example, this is done via an oauth2 token. 
+# On the other hand, SAP Hana datalake uses client authentication via certificates. +# Right now, authentication via certificates and via OAuth2 is supported. The input artifact shows an example for OAuth2 while the output artifact shows an example for using certificates. +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: input-output-artifact-webhdfs- +spec: + entrypoint: input-output-artifact-webhdfs-example + templates: + - name: input-output-artifact-webhdfs-example + inputs: + artifacts: + - name: my-art + path: /my-artifact + http: + # webHDFS artifacts are accessed via an HTTP artifact + # below is an example on how to use authentication via oauth2 + # url: has to consist of the full webhdfs URL, including the operation and any desired query params + # oauth2.clientID: points to a kubernetes secret named oauth-sec with a data entry of "clientID" + # oauth2.clientSecret: points to a kubernetes secret named oauth-sec with a data entry of "clientSecret" + # oauth2.TokenURL: points to a kubernetes secret named oauth-sec with a data entry of "tokenURL" + # oauth2.scopes: necessary scopes for the oauth request + url: https://mywebhdfsprovider.com/webhdfs/v1/file.txt?op=OPEN + auth: + oauth2: + clientIDSecret: + name: oauth-sec + key: clientID + clientSecretSecret: + name: oauth-sec + key: clientSecret + tokenURLSecret: + name: oauth-sec + key: tokenURL + scopes: + - some + - scopes + # endpointParams can hold additional fields that may be needed in the oauth request + endpointParams: + - key: customkey + value: customvalue + # optional: headers which should be sent in the HTTP requests + headers: + - name: CustomHeader + value: CustomValue + outputs: + artifacts: + - name: my-art2 + path: /my-artifact + overwrite: true + http: + # below is an example on how to use authentication via certificates + # clientCert.clientCertSecret: points to a kubernetes secret named cert-sec with a data entry of "certificate.pem" + # 
clientCert.clientKeySecret: points to a kubernetes secret named cert-sec with a data entry of "key.pem" + # clientCertSecret and clientKeySecret secrets should contain the raw PEM contents of the tls certificate pair + url: https://mywebhdfsprovider.com/webhdfs/v1/file.txt?op=CREATE&overwrite=true + auth: + clientCert: + clientCertSecret: + name: cert-sec + key: certificate.pem + clientKeySecret: + name: cert-sec + key: key.pem + # optional: headers which should be sent in the HTTP requests + headers: + - name: CustomHeader + value: CustomValue + container: + image: debian:latest + command: [sh, -c] + args: ["cat /my-artifact"] diff --git a/go.mod b/go.mod index 6a120cadc4a4..00dc18d6fab0 100644 --- a/go.mod +++ b/go.mod @@ -1,125 +1,153 @@ module github.com/argoproj/argo-workflows/v3 -go 1.17 +go 1.18 require ( - cloud.google.com/go/storage v1.20.0 + cloud.google.com/go/storage v1.26.0 + github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.1.0 + github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v0.4.1 github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible github.com/Masterminds/sprig/v3 v3.2.2 - github.com/TwinProduction/go-color v0.0.3 - github.com/aliyun/aliyun-oss-go-sdk v2.2.2+incompatible + github.com/TwiN/go-color v1.1.0 + github.com/aliyun/aliyun-oss-go-sdk v2.2.5+incompatible github.com/antonmedv/expr v1.9.0 - github.com/argoproj-labs/argo-dataflow v0.10.0 github.com/argoproj/argo-events v0.17.1-0.20220223155401-ddda8800f9f8 - github.com/argoproj/pkg v0.11.0 - github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f // indirect + github.com/argoproj/pkg v0.13.6 github.com/blushft/go-diagrams v0.0.0-20201006005127-c78c821223d9 github.com/colinmarc/hdfs v1.1.4-0.20180805212432-9746310a4d31 - github.com/coreos/go-oidc/v3 v3.1.0 + github.com/coreos/go-oidc/v3 v3.4.0 github.com/doublerebel/bellows v0.0.0-20160303004610-f177d92a03d3 - github.com/emicklei/go-restful v2.15.0+incompatible // indirect 
github.com/evanphx/json-patch v5.6.0+incompatible - github.com/fatih/structs v1.1.0 // indirect github.com/gavv/httpexpect/v2 v2.3.1 - github.com/go-openapi/jsonreference v0.19.6 + github.com/go-git/go-git/v5 v5.4.2 + github.com/go-jose/go-jose/v3 v3.0.0 + github.com/go-openapi/jsonreference v0.20.0 github.com/go-sql-driver/mysql v1.6.0 github.com/gogo/protobuf v1.3.2 github.com/golang/protobuf v1.5.2 + github.com/google/go-containerregistry v0.8.1-0.20220110151055-a61fd0a8e2bb + github.com/google/go-containerregistry/pkg/authn/k8schain v0.0.0-20220411142604-2042cc9d6401 github.com/gorilla/handlers v1.5.1 github.com/gorilla/websocket v1.5.0 github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 github.com/grpc-ecosystem/grpc-gateway v1.16.0 - github.com/imkira/go-interpol v1.1.0 // indirect github.com/klauspost/pgzip v1.2.5 - github.com/minio/minio-go/v7 v7.0.23 - github.com/mitchellh/go-ps v0.0.0-20190716172923-621e5597135b - github.com/prometheus/client_golang v1.12.1 + github.com/minio/minio-go/v7 v7.0.36 + github.com/pkg/errors v0.9.1 + github.com/prometheus/client_golang v1.13.0 github.com/prometheus/client_model v0.2.0 - github.com/prometheus/common v0.32.1 + github.com/prometheus/common v0.37.0 github.com/robfig/cron/v3 v3.0.1 - github.com/satori/go.uuid v1.2.0 // indirect - github.com/sirupsen/logrus v1.8.1 + github.com/sethvargo/go-limiter v0.7.2 + github.com/sirupsen/logrus v1.9.0 github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 github.com/soheilhy/cmux v0.1.5 - github.com/spf13/cobra v1.4.0 + github.com/spf13/cobra v1.5.0 github.com/spf13/pflag v1.0.5 - github.com/spf13/viper v1.10.1 - github.com/stretchr/testify v1.7.1 - github.com/tidwall/gjson v1.14.0 - github.com/valyala/fasthttp v1.27.0 // indirect + github.com/spf13/viper v1.13.0 + github.com/stretchr/testify v1.8.0 + github.com/tidwall/gjson v1.14.3 github.com/valyala/fasttemplate v1.2.1 - github.com/xeipuuv/gojsonpointer 
v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonschema v1.2.0 - golang.org/x/crypto v0.0.0-20220128200615-198e4374d7ed - golang.org/x/net v0.0.0-20220225172249-27dd8689420f - golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a - golang.org/x/sync v0.0.0-20210220032951-036812b2e83c - golang.org/x/time v0.0.0-20220210224613-90d013bbcef8 - google.golang.org/api v0.73.0 - google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6 - google.golang.org/grpc v1.45.0 + golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa + golang.org/x/exp v0.0.0-20220602145555-4a0574d9293f + golang.org/x/net v0.0.0-20220909164309-bea034e7d591 + golang.org/x/oauth2 v0.0.0-20220822191816-0ebed06d0094 + golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f + golang.org/x/time v0.0.0-20220224211638-0e9765cccd65 + google.golang.org/api v0.96.0 + google.golang.org/genproto v0.0.0-20220810155839-1856144b1d9c + google.golang.org/grpc v1.48.0 gopkg.in/go-playground/webhooks.v5 v5.17.0 gopkg.in/jcmturner/gokrb5.v5 v5.3.0 - gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22 // indirect - k8s.io/api v0.23.3 - k8s.io/apimachinery v0.23.3 - k8s.io/client-go v0.23.3 - k8s.io/klog/v2 v2.40.1 - k8s.io/kube-openapi v0.0.0-20220124234850-424119656bbf + k8s.io/api v0.24.3 + k8s.io/apimachinery v0.24.3 + k8s.io/client-go v0.24.3 + k8s.io/gengo v0.0.0-20220613173612-397b4ae3bce7 + k8s.io/klog/v2 v2.60.1 + k8s.io/kube-openapi v0.0.0-20220627174259-011e075b9cb8 k8s.io/utils v0.0.0-20220210201930-3a6ce19ff2f9 sigs.k8s.io/yaml v1.3.0 upper.io/db.v3 v3.8.0+incompatible ) require ( - cloud.google.com/go v0.100.2 // indirect - cloud.google.com/go/compute v1.5.0 // indirect - cloud.google.com/go/iam v0.1.1 // indirect + cloud.google.com/go v0.102.1 // indirect + cloud.google.com/go/compute v1.7.0 // indirect + cloud.google.com/go/iam v0.3.0 // indirect + github.com/Azure/azure-sdk-for-go v62.0.0+incompatible // indirect + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.0.0 // indirect + 
github.com/Azure/azure-sdk-for-go/sdk/internal v1.0.0 // indirect github.com/Azure/go-autorest v14.2.0+incompatible // indirect - github.com/Azure/go-autorest/autorest v0.11.18 // indirect - github.com/Azure/go-autorest/autorest/adal v0.9.13 // indirect + github.com/Azure/go-autorest/autorest v0.11.24 // indirect + github.com/Azure/go-autorest/autorest/adal v0.9.18 // indirect + github.com/Azure/go-autorest/autorest/azure/auth v0.5.11 // indirect + github.com/Azure/go-autorest/autorest/azure/cli v0.4.5 // indirect github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect github.com/Azure/go-autorest/logger v0.2.1 // indirect github.com/Azure/go-autorest/tracing v0.6.0 // indirect + github.com/AzureAD/microsoft-authentication-library-for-go v0.5.1 // indirect github.com/Masterminds/goutils v1.1.1 // indirect github.com/Masterminds/semver/v3 v3.1.1 // indirect - github.com/Microsoft/go-winio v0.5.0 // indirect + github.com/Microsoft/go-winio v0.5.1 // indirect github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7 // indirect - github.com/PuerkitoBio/purell v1.1.1 // indirect - github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect github.com/acomagu/bufpipe v1.0.3 // indirect github.com/ajg/form v1.5.1 // indirect - github.com/andybalholm/brotli v1.0.2 // indirect + github.com/andybalholm/brotli v1.0.3 // indirect github.com/awalterschulze/gographviz v0.0.0-20200901124122-0eecad45bd71 // indirect - github.com/aws/aws-sdk-go v1.42.50 // indirect + github.com/aws/aws-sdk-go v1.44.39 // indirect + github.com/aws/aws-sdk-go-v2 v1.16.2 // indirect + github.com/aws/aws-sdk-go-v2/config v1.15.3 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.11.2 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.12.3 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.9 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.3 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.3.10 // 
indirect + github.com/aws/aws-sdk-go-v2/service/ecr v1.15.0 // indirect + github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.12.0 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.3 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.11.3 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.16.3 // indirect + github.com/aws/smithy-go v1.11.2 // indirect + github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.0.0-20220228164355-396b2034c795 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/cespare/xxhash/v2 v2.1.2 // indirect - github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect + github.com/chrismellard/docker-credential-acr-env v0.0.0-20220119192733-fe33c00cee21 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect github.com/davecgh/go-spew v1.1.1 // indirect + github.com/dimchansky/utfbom v1.1.1 // indirect + github.com/docker/cli v20.10.12+incompatible // indirect + github.com/docker/distribution v2.8.0+incompatible // indirect + github.com/docker/docker v20.10.12+incompatible // indirect + github.com/docker/docker-credential-helpers v0.6.4 // indirect github.com/dustin/go-humanize v1.0.0 // indirect + github.com/emicklei/go-restful/v3 v3.8.0 // indirect github.com/emirpasic/gods v1.12.0 // indirect - github.com/felixge/httpsnoop v1.0.2 // indirect - github.com/form3tech-oss/jwt-go v3.2.3+incompatible // indirect - github.com/fsnotify/fsnotify v1.5.1 // indirect + github.com/fatih/structs v1.1.0 // indirect + github.com/felixge/httpsnoop v1.0.3 // indirect + github.com/fsnotify/fsnotify v1.5.4 // indirect github.com/go-git/gcfg v1.5.0 // indirect github.com/go-git/go-billy/v5 v5.3.1 // indirect - github.com/go-git/go-git/v5 v5.4.2 - github.com/go-jose/go-jose/v3 v3.0.0 - github.com/go-logr/logr v1.2.2 // indirect + github.com/go-logr/logr v1.2.3 // indirect github.com/go-openapi/jsonpointer v0.19.5 // indirect github.com/go-openapi/swag v0.19.15 // indirect + github.com/golang-jwt/jwt 
v3.2.1+incompatible // indirect + github.com/golang-jwt/jwt/v4 v4.3.0 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect - github.com/google/go-cmp v0.5.7 // indirect + github.com/google/gnostic v0.5.7-v3refs // indirect + github.com/google/go-cmp v0.5.8 // indirect + github.com/google/go-containerregistry/pkg/authn/kubernetes v0.0.0-20220301182634-bfe2ffc6b6bd // indirect github.com/google/go-querystring v1.1.0 // indirect github.com/google/gofuzz v1.2.0 // indirect github.com/google/uuid v1.3.0 // indirect - github.com/googleapis/gax-go/v2 v2.1.1 // indirect - github.com/googleapis/gnostic v0.5.5 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.1.0 // indirect + github.com/googleapis/gax-go/v2 v2.4.0 // indirect github.com/hashicorp/go-uuid v1.0.2 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/huandu/xstrings v1.3.2 // indirect github.com/imdario/mergo v0.3.12 // indirect + github.com/imkira/go-interpol v1.1.0 // indirect github.com/inconshreveable/mousetrap v1.0.0 // indirect github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect github.com/jcmturner/gofork v1.0.0 // indirect @@ -127,65 +155,72 @@ require ( github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 // indirect - github.com/klauspost/compress v1.14.2 // indirect - github.com/klauspost/cpuid v1.3.1 // indirect + github.com/klauspost/compress v1.15.9 // indirect + github.com/klauspost/cpuid/v2 v2.1.0 // indirect + github.com/kylelemons/godebug v1.1.0 // indirect github.com/lib/pq v1.10.4 // indirect - github.com/magiconair/properties v1.8.5 // indirect + github.com/magiconair/properties v1.8.6 // indirect github.com/mailru/easyjson v0.7.7 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 // indirect - github.com/minio/md5-simd v1.1.0 // indirect - 
github.com/minio/sha256-simd v0.1.1 // indirect + github.com/minio/md5-simd v1.1.2 // indirect + github.com/minio/sha256-simd v1.0.0 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect - github.com/mitchellh/mapstructure v1.4.3 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/moby/spdystream v0.2.0 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/oliveagle/jsonpath v0.0.0-20180606110733-2e52cf6e6852 // indirect - github.com/pelletier/go-toml v1.9.4 // indirect - github.com/pkg/errors v0.9.1 // indirect + github.com/onsi/ginkgo v1.16.5 // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.0.3-0.20220114050600-8b9d41f48198 // indirect + github.com/pelletier/go-toml v1.9.5 // indirect + github.com/pelletier/go-toml/v2 v2.0.5 // indirect + github.com/pkg/browser v0.0.0-20210115035449-ce105d075bb4 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/prometheus/procfs v0.7.3 // indirect - github.com/rs/xid v1.2.1 // indirect + github.com/prometheus/procfs v0.8.0 // indirect + github.com/rs/xid v1.4.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/sergi/go-diff v1.1.0 // indirect github.com/shopspring/decimal v1.2.0 // indirect - github.com/spf13/afero v1.8.0 // indirect - github.com/spf13/cast v1.4.1 // indirect + github.com/spf13/afero v1.8.2 // indirect + github.com/spf13/cast v1.5.0 // indirect github.com/spf13/jwalterweatherman v1.1.0 // indirect - github.com/stretchr/objx v0.2.0 // indirect - github.com/subosito/gotenv v1.2.0 // indirect + github.com/stretchr/objx v0.4.0 // indirect + github.com/subosito/gotenv v1.4.1 // indirect 
github.com/tidwall/match v1.1.1 // indirect github.com/tidwall/pretty v1.2.0 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect + github.com/valyala/fasthttp v1.30.0 // indirect + github.com/whilp/git-urls v1.0.0 // indirect github.com/xanzy/ssh-agent v0.3.1 // indirect + github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0 // indirect github.com/yudai/gojsondiff v1.0.0 // indirect github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82 // indirect go.opencensus.io v0.23.0 // indirect - golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5 // indirect + golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 // indirect golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 // indirect golang.org/x/text v0.3.7 // indirect - golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect + golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/protobuf v1.27.1 // indirect + google.golang.org/protobuf v1.28.1 // indirect gopkg.in/inf.v0 v0.9.1 // indirect - gopkg.in/ini.v1 v1.66.3 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/jcmturner/aescts.v1 v1.0.1 // indirect gopkg.in/jcmturner/dnsutils.v1 v1.0.1 // indirect gopkg.in/jcmturner/rpc.v0 v0.0.2 // indirect + gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22 // indirect gopkg.in/square/go-jose.v2 v2.6.0 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect - gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect moul.io/http2curl v1.0.1-0.20190925090545-5cd742060b0e // indirect - sigs.k8s.io/controller-runtime v0.11.1 // indirect sigs.k8s.io/json v0.0.0-20211208200746-9f7c6b3444d2 // indirect sigs.k8s.io/structured-merge-diff/v4 v4.2.1 // indirect ) -require 
github.com/whilp/git-urls v1.0.0 // indirect - -replace github.com/go-git/go-git/v5 => github.com/argoproj-labs/go-git/v5 v5.4.3 +replace github.com/go-git/go-git/v5 => github.com/argoproj-labs/go-git/v5 v5.4.4 diff --git a/go.sum b/go.sum index e362a69459d9..cfea20da2ee0 100644 --- a/go.sum +++ b/go.sum @@ -1,3 +1,6 @@ +4d63.com/gochecknoglobals v0.1.0/go.mod h1:wfdC5ZjKSPr7CybKEcgJhUOgeAQW1+7WcyK8OvUilfo= +bazil.org/fuse v0.0.0-20160811212531-371fbbdaa898/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8= +bitbucket.org/creachadair/shell v0.0.6/go.mod h1:8Qqi/cYk7vPnsOePHroKXDJYmb5x7ENhtiFtfZq8K+M= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= @@ -12,6 +15,7 @@ cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6 cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.60.0/go.mod h1:yw2G51M9IfRboUH61Us8GqCeF1PzPblB823Mn2q2eAU= cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= @@ -29,9 +33,10 @@ cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= cloud.google.com/go v0.98.0/go.mod h1:ua6Ush4NALrHk5QXDWnjvZHN93OuF0HfuEPq9I1X0cM= cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= -cloud.google.com/go v0.100.1/go.mod h1:fs4QogzfH5n2pBXBP9vRiU+eCny7lD2vmFZy79Iuw1U= -cloud.google.com/go 
v0.100.2 h1:t9Iw5QH5v4XtlEQaCtUY7x6sCABps8sW0acw7e2WQ6Y= cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A= +cloud.google.com/go v0.102.0/go.mod h1:oWcCzKlqJ5zgHQt9YsaeTY9KzIvjyy0ArmiBUgpQ+nc= +cloud.google.com/go v0.102.1 h1:vpK6iQWv/2uUeFJth4/cBHsQAGjn1iIE6AAlxipRaA0= +cloud.google.com/go v0.102.1/go.mod h1:XZ77E9qnTEnrgEOvr4xzfdX5TRo7fB4T2F4O6+34hIU= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= @@ -40,215 +45,285 @@ cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4g cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= -cloud.google.com/go/compute v1.5.0 h1:b1zWmYuuHz7gO9kDcM/EpHGr06UgsYNRpNJzI2kFiLM= cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= +cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz/FMzPu0s= +cloud.google.com/go/compute v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU= +cloud.google.com/go/compute v1.7.0 h1:v/k9Eueb8aAJ0vZuxKMrgm6kPhCLZU9HxFU+AFDs9Uk= +cloud.google.com/go/compute v1.7.0/go.mod h1:435lt8av5oL9P3fv1OEzSbSUe+ybHXGMPQHHZWZxy9U= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= +cloud.google.com/go/firestore v1.6.0/go.mod h1:afJwI0vaXwAG54kI7A//lP/lSPDkQORQuMkv56TxEPU= cloud.google.com/go/firestore v1.6.1/go.mod 
h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY= -cloud.google.com/go/iam v0.1.1 h1:4CapQyNFjiksks1/x7jsvsygFPhihslYk5GptIrlX68= -cloud.google.com/go/iam v0.1.1/go.mod h1:CKqrcnI/suGpybEHxZ7BMehL0oA4LpdyJdUlTl9jVMw= -cloud.google.com/go/kms v1.1.0/go.mod h1:WdbppnCDMDpOvoYBMn1+gNmOeEoZYqAv+HeuKARGCXI= +cloud.google.com/go/iam v0.3.0 h1:exkAomrVUuzx9kWFI1wm3KI0uoDeUFPB4kKGzx6x+Gc= +cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/pubsub v1.5.0/go.mod h1:ZEwJccE3z93Z2HWvstpri00jOg7oO4UZDtKhwDwqF0w= +cloud.google.com/go/spanner v1.7.0/go.mod h1:sd3K2gZ9Fd0vMPLXzeCrF6fq4i63Q7aTLW/lBIfBkIk= cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= -cloud.google.com/go/storage v1.20.0 h1:kv3rQ3clEQdxqokkCCgQo+bxPqcuXiROjxvnKb8Oqdk= -cloud.google.com/go/storage v1.20.0/go.mod h1:TiC1o6FxNCG8y5gB7rqCsFZCIYPMPZCO81ppOoEPLGI= +cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= +cloud.google.com/go/storage v1.26.0 h1:lYAGjknyDJirSzfwUlkv4Nsnj7od7foxQNH/fqZqles= +cloud.google.com/go/storage v1.26.0/go.mod h1:mk/N7YwIKEWyTvXAWQCIeiCTdLoRH6Pd5xmSnolQLTI= 
+contrib.go.opencensus.io/exporter/stackdriver v0.13.4/go.mod h1:aXENhDJ1Y4lIg4EUaVTwzvYETVNZk10Pu26tevFKLUc= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/Azure/azure-amqp-common-go/v3 v3.2.3/go.mod h1:7rPmbSfszeovxGfc5fSAXE4ehlXQZHpMja2OtxC2Tas= -github.com/Azure/azure-event-hubs-go/v3 v3.3.17/go.mod h1:R5H325+EzgxcBDkUerEwtor7ZQg77G7HiOTwpcuIVXY= -github.com/Azure/azure-pipeline-go v0.1.8/go.mod h1:XA1kFWRVhSK+KNFiOhfv83Fv8L9achrP7OxIzeTn1Yg= -github.com/Azure/azure-pipeline-go v0.1.9/go.mod h1:XA1kFWRVhSK+KNFiOhfv83Fv8L9achrP7OxIzeTn1Yg= -github.com/Azure/azure-sdk-for-go v51.1.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= -github.com/Azure/azure-sdk-for-go v52.6.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= -github.com/Azure/azure-storage-blob-go v0.6.0/go.mod h1:oGfmITT1V6x//CswqY2gtAHND+xIP64/qL7a5QJix0Y= -github.com/Azure/go-amqp v0.17.0/go.mod h1:9YJ3RhxRT1gquYnzpZO1vcYMMpAdJT+QEg6fwmw9Zlg= -github.com/Azure/go-ansiterm v0.0.0-20210608223527-2377c96fe795/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= -github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/Antonboom/errname v0.1.5/go.mod h1:DugbBstvPFQbv/5uLcRRzfrNqKE9tVdVCqWCLp6Cifo= +github.com/Antonboom/nilnil v0.1.0/go.mod h1:PhHLvRPSghY5Y7mX4TW+BHZQYo1A8flE5H20D3IPZBo= +github.com/Azure/azure-sdk-for-go v16.2.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-sdk-for-go v46.4.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-sdk-for-go v62.0.0+incompatible h1:8N2k27SYtc12qj5nTsuFMFJPZn5CGmgMWqTy4y9I7Jw= +github.com/Azure/azure-sdk-for-go v62.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.0.0 
h1:sVPhtT2qjO86rTUaWMr4WoES4TkjGnzcioXcnHV9s5k= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.0.0/go.mod h1:uGG2W01BaETf0Ozp+QxxKJdMBNRWPdstHG0Fmdwn1/U= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.1.0 h1:QkAcEIAKbNL4KoFr4SathZPhDhF4mVwpBMFlYjyAqy8= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.1.0/go.mod h1:bhXu1AjYL+wutSL/kpSq6s7733q2Rb0yuot9Zgfqa/0= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.0.0 h1:jp0dGvZ7ZK0mgqnTSClMxa5xuRL7NZgHameVYF6BurY= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.0.0/go.mod h1:eWRD7oawr1Mu1sLCawqVc0CUiF43ia3qQMxLscsKQ9w= +github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v0.4.1 h1:QSdcrd/UFJv6Bp/CfoVf2SrENpFn9P6Yh8yb+xNhYMM= +github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v0.4.1/go.mod h1:eZ4g6GUvXiGulfIbbhh1Xr4XwUYaYaWMqzGD/284wCA= +github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= +github.com/Azure/go-autorest v10.8.1+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs= github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= -github.com/Azure/go-autorest/autorest v0.9.3/go.mod h1:GsRuLYvwzLjjjRoWEIyMUaYq8GNUx2nRB378IPt/1p0= -github.com/Azure/go-autorest/autorest v0.11.18 h1:90Y4srNYrwOtAgVo3ndrQkTYn6kf1Eg/AjTFJ8Is2aM= +github.com/Azure/go-autorest/autorest v0.11.1/go.mod h1:JFgpikqFJ/MleTTxwepExTKnFUKKszPS8UavbQYUMuw= +github.com/Azure/go-autorest/autorest v0.11.6/go.mod h1:V6p3pKZx1KKkJubbxnDWrzNhEIfOy/pTGasLqzHIPHs= +github.com/Azure/go-autorest/autorest v0.11.8/go.mod h1:V6p3pKZx1KKkJubbxnDWrzNhEIfOy/pTGasLqzHIPHs= github.com/Azure/go-autorest/autorest v0.11.18/go.mod h1:dSiJPy22c3u0OtOKDNttNgqpNFY/GeWa7GH/Pz56QRA= +github.com/Azure/go-autorest/autorest v0.11.24 
h1:1fIGgHKqVm54KIPT+q8Zmd1QlVsmHqeUGso5qm2BqqE= +github.com/Azure/go-autorest/autorest v0.11.24/go.mod h1:G6kyRlFnTuSbEYkQGawPfsCswgme4iYf6rfSKUDzbCc= github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0= -github.com/Azure/go-autorest/autorest/adal v0.8.0/go.mod h1:Z6vX6WXXuyieHAXwMj0S6HY6e6wcHn37qQMBQlvY3lc= -github.com/Azure/go-autorest/autorest/adal v0.8.1/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q= -github.com/Azure/go-autorest/autorest/adal v0.9.13 h1:Mp5hbtOePIzM8pJVRa3YLrWWmZtoxRXqUEzCfJt3+/Q= +github.com/Azure/go-autorest/autorest/adal v0.9.0/go.mod h1:/c022QCutn2P7uY+/oQWWNcK9YU+MH96NgK+jErpbcg= +github.com/Azure/go-autorest/autorest/adal v0.9.4/go.mod h1:/3SMAM86bP6wC9Ev35peQDUeqFZBMH07vvUOmg4z/fE= +github.com/Azure/go-autorest/autorest/adal v0.9.5/go.mod h1:B7KF7jKIeC9Mct5spmyCB/A8CG/sEz1vwIRGv/bbw7A= github.com/Azure/go-autorest/autorest/adal v0.9.13/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M= -github.com/Azure/go-autorest/autorest/azure/auth v0.4.2/go.mod h1:90gmfKdlmKgfjUpnCEpOJzsUEjrWDSLwHIG73tSXddM= -github.com/Azure/go-autorest/autorest/azure/cli v0.3.1/go.mod h1:ZG5p860J94/0kI9mNJVoIoLgXcirM2gF5i2kWloofxw= +github.com/Azure/go-autorest/autorest/adal v0.9.18 h1:kLnPsRjzZZUF3K5REu/Kc+qMQrvuza2bwSnNdhmzLfQ= +github.com/Azure/go-autorest/autorest/adal v0.9.18/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ= +github.com/Azure/go-autorest/autorest/azure/auth v0.5.2/go.mod h1:q98IH4qgc3eWM4/WOeR5+YPmBuy8Lq0jNRDwSM0CuFk= +github.com/Azure/go-autorest/autorest/azure/auth v0.5.11 h1:P6bYXFoao05z5uhOQzbC3Qd8JqF3jUoocoTeIxkp2cA= +github.com/Azure/go-autorest/autorest/azure/auth v0.5.11/go.mod h1:84w/uV8E37feW2NCJ08uT9VBfjfUHpgLVnG2InYD6cg= +github.com/Azure/go-autorest/autorest/azure/cli v0.4.1/go.mod h1:JfDgiIO1/RPu6z42AdQTyjOoCM2MFhLqSBDvMEkDgcg= +github.com/Azure/go-autorest/autorest/azure/cli v0.4.5 h1:0W/yGmFdTIT77fvdlGZ0LMISoLHFJ7Tx4U0yeB+uFs4= 
+github.com/Azure/go-autorest/autorest/azure/cli v0.4.5/go.mod h1:ADQAXrkgm7acgWVUNamOgh8YNrv4p27l3Wc55oVfpzg= github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA= -github.com/Azure/go-autorest/autorest/date v0.2.0/go.mod h1:vcORJHLJEh643/Ioh9+vPmf1Ij9AEBM5FuBIXLmIy0g= github.com/Azure/go-autorest/autorest/date v0.3.0 h1:7gUk1U5M/CQbp9WoqinNzJar+8KY+LPI6wiWrP/myHw= github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74= github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= -github.com/Azure/go-autorest/autorest/mocks v0.3.0/go.mod h1:a8FDP3DYzQ4RYfVAxAN3SVSiiO77gL2j2ronKKP0syM= +github.com/Azure/go-autorest/autorest/mocks v0.4.0/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= github.com/Azure/go-autorest/autorest/mocks v0.4.1 h1:K0laFcLE6VLTOwNgSxaGbUcLPuGXlNkbVvq4cW4nIHk= github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= -github.com/Azure/go-autorest/autorest/to v0.4.0/go.mod h1:fE8iZBn7LQR7zH/9XU2NcPR4o9jEImooCeWJcYV/zLE= -github.com/Azure/go-autorest/autorest/validation v0.3.1/go.mod h1:yhLgjC0Wda5DYXl6JAsWyUe4KVNffhoDhG0zVzUMo3E= github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc= +github.com/Azure/go-autorest/logger v0.2.0/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= github.com/Azure/go-autorest/logger v0.2.1 h1:IG7i4p/mDa2Ce4TRyAO8IHnVhAVF3RFU+ZtXWSmf4Tg= github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= github.com/Azure/go-autorest/tracing v0.6.0 h1:TYi4+3m5t6K48TGI9AUdb+IzbnSxvnvUMfuitfgcfuo= github.com/Azure/go-autorest/tracing v0.6.0/go.mod 
h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= +github.com/AzureAD/microsoft-authentication-library-for-go v0.5.1 h1:BWe8a+f/t+7KY7zH2mqygeUD0t8hNFXe08p1Pb3/jKE= +github.com/AzureAD/microsoft-authentication-library-for-go v0.5.1/go.mod h1:Vt9sXTKwMyGcOxSmLDMnGPgqsUg7m8pe215qMLrDXw4= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/toml v0.4.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/DATA-DOG/go-sqlmock v1.3.3/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM= -github.com/DataDog/datadog-go v2.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= -github.com/HdrHistogram/hdrhistogram-go v1.1.2/go.mod h1:yDgFjdqOqDEKOvasDdhWNXYg9BVp4O+o5f6V/ehm6Oo= +github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible h1:1G1pk05UrOh0NlF1oeaaix1x8XzrfjIDK47TY0Zehcw= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= -github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= +github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030IGemrRc= 
github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= -github.com/Masterminds/sprig v2.22.0+incompatible h1:z4yfnGrZ7netVz+0EDJ0Wi+5VZCSYp4Z0m2dk6cEM60= +github.com/Masterminds/sprig v2.15.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= github.com/Masterminds/sprig v2.22.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= -github.com/Masterminds/sprig/v3 v3.2.0/go.mod h1:tWhwTbUTndesPNeF0C900vKoq283u6zp4APT9vaF3SI= github.com/Masterminds/sprig/v3 v3.2.2 h1:17jRggJu518dr3QaafizSXOjKYp94wKfABxUmyxvxX8= github.com/Masterminds/sprig/v3 v3.2.2/go.mod h1:UoaO7Yp8KlPnJIYWTFkMaqPUYKTfGFPhxNuwnnxkKlk= +github.com/Microsoft/go-winio v0.4.11/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA= github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA= +github.com/Microsoft/go-winio v0.4.15-0.20190919025122-fc70bd9a86b5/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw= +github.com/Microsoft/go-winio v0.4.16-0.20201130162521-d1ffc52c7331/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0= github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0= -github.com/Microsoft/go-winio v0.5.0 h1:Elr9Wn+sGKPlkaBvwu4mTrxtmOp3F3yV9qhaHbXGjwU= +github.com/Microsoft/go-winio v0.4.17-0.20210211115548-6eac466e5fa3/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= +github.com/Microsoft/go-winio v0.4.17-0.20210324224401-5516f17a5958/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= +github.com/Microsoft/go-winio v0.4.17/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= github.com/Microsoft/go-winio v0.5.0/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= +github.com/Microsoft/go-winio v0.5.1 h1:aPJp2QD7OOrhO5tQXqQoGSJc+DjDtWTGLOmNyAm6FgY= +github.com/Microsoft/go-winio v0.5.1/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= +github.com/Microsoft/hcsshim v0.8.6/go.mod h1:Op3hHsoHPAvb6lceZHDtd9OkTew38wNoXnJs8iY7rUg= 
+github.com/Microsoft/hcsshim v0.8.7-0.20190325164909-8abdbb8205e4/go.mod h1:Op3hHsoHPAvb6lceZHDtd9OkTew38wNoXnJs8iY7rUg= +github.com/Microsoft/hcsshim v0.8.7/go.mod h1:OHd7sQqRFrYd3RmSgbgji+ctCwkbq2wbEYNSzOYtcBQ= +github.com/Microsoft/hcsshim v0.8.9/go.mod h1:5692vkUqntj1idxauYlpoINNKeqCiG6Sg38RRsjT5y8= +github.com/Microsoft/hcsshim v0.8.14/go.mod h1:NtVKoYxQuTLx6gEq0L96c9Ju4JbRJ4nY2ow3VK6a9Lg= +github.com/Microsoft/hcsshim v0.8.15/go.mod h1:x38A4YbHbdxJtc0sF6oIz+RG0npwSCAvn69iY6URG00= +github.com/Microsoft/hcsshim v0.8.16/go.mod h1:o5/SZqmR7x9JNKsW3pu+nqHm0MF8vbA+VxGOoXdC600= +github.com/Microsoft/hcsshim v0.8.23/go.mod h1:4zegtUJth7lAvFyc6cH2gGQ5B3OFQim01nnU2M8jKDg= +github.com/Microsoft/hcsshim/test v0.0.0-20201218223536-d3e5debf77da/go.mod h1:5hlzMzRKMLyo42nCZ9oml8AdTlq/0cvIaBv6tK1RehU= +github.com/Microsoft/hcsshim/test v0.0.0-20210227013316-43a75bb4edd3/go.mod h1:mw7qgWloBUl75W/gVH3cQszUg1+gUITj7D6NY7ywVnY= github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= -github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/OpenPeeDeeP/depguard v1.0.1/go.mod h1:xsIw86fROiiwelg+jB2uM9PiKihMMmUx/1V+TNhjQvM= github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7 h1:YoJbenK9C67SkzkDfmQuVln04ygHj3vjZfd9FL+GmQQ= github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7/go.mod h1:z4/9nQmJSSwwds7ejkxaJwO37dru3geImFUdJlaLzQo= github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= -github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= -github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI= github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod 
h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= -github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= -github.com/Shopify/sarama v1.31.1/go.mod h1:99E1xQ1Ql2bYcuJfwdXY3cE17W8+549Ty8PG/11BDqY= -github.com/Shopify/toxiproxy/v2 v2.3.0/go.mod h1:KvQTtB6RjCJY4zqNJn7C7JDFgsG5uoHYDirfUfpIm0c= -github.com/TwinProduction/go-color v0.0.3 h1:2asEWaZo0Oh/FCib+KqHmEoideK8fMyX58JujC/dbDA= -github.com/TwinProduction/go-color v0.0.3/go.mod h1:5hWpSyT+mmKPjCwPNEruBW5Dkbs/2PwOuU468ntEXNQ= +github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ= +github.com/StackExchange/wmi v1.2.1/go.mod h1:rcmrprowKIVzvc+NUiLncP2uuArMWLCbu9SBzvHz7e8= +github.com/TwiN/go-color v1.1.0 h1:yhLAHgjp2iAxmNjDiVb6Z073NE65yoaPlcki1Q22yyQ= +github.com/TwiN/go-color v1.1.0/go.mod h1:aKVf4e1mD4ai2FtPifkDPP5iyoCwiK08YGzGwerjKo0= github.com/UnnoTed/fileb0x v1.1.4/go.mod h1:X59xXT18tdNk/D6j+KZySratBsuKJauMtVuJ9cgOiZs= github.com/acomagu/bufpipe v1.0.3 h1:fxAGrHZTgQ9w5QqVItgzwj235/uYZYgbXitB+dLupOk= github.com/acomagu/bufpipe v1.0.3/go.mod h1:mxdxdup/WdsKVreO5GpW4+M/1CE2sMG4jeGJ2sYmHc4= -github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM= -github.com/ahmetb/gen-crd-api-reference-docs v0.3.0/go.mod h1:TdjdkYhlOifCQWPs1UdTma97kQQMozf5h26hTuG70u8= github.com/ajg/form v1.5.1 h1:t9c7v8JUKu/XxOGBU0yjNpaMloxGEJhUkqFRq0ibGeU= github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= -github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod 
h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= -github.com/aliyun/aliyun-oss-go-sdk v2.2.2+incompatible h1:9gWa46nstkJ9miBReJcN8Gq34cBFbzSpQZVVT9N09TM= -github.com/aliyun/aliyun-oss-go-sdk v2.2.2+incompatible/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8= -github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129/go.mod h1:rFgpPQZYZ8vdbc+48xibu8ALc3yeyd64IhHS+PU6Yyg= -github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= -github.com/andybalholm/brotli v1.0.2 h1:JKnhI/XQ75uFBTiuzXpzFrUriDPiZjlOSzh6wXogP0E= +github.com/alexflint/go-filemutex v0.0.0-20171022225611-72bdc8eae2ae/go.mod h1:CgnQgUtFrFz9mxFNtED3jI5tLDjKlOM+oUF/sTk6ps0= +github.com/alexkohler/prealloc v1.0.0/go.mod h1:VetnK3dIgFBBKmg0YnD9F9x6Icjd+9cvfHR56wJVlKE= +github.com/aliyun/aliyun-oss-go-sdk v2.2.5+incompatible h1:QoRMR0TCctLDqBCMyOu1eXdZyMw3F7uGA9qPn2J4+R8= +github.com/aliyun/aliyun-oss-go-sdk v2.2.5+incompatible/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8= github.com/andybalholm/brotli v1.0.2/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= +github.com/andybalholm/brotli v1.0.3 h1:fpcw+r1N1h0Poc1F/pHbW40cUm/lMEQslZtCkBQ0UnM= +github.com/andybalholm/brotli v1.0.3/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA= github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c= +github.com/antihax/optional v0.0.0-20180407024304-ca021399b1a6/go.mod h1:V8iCPQYkqmusNa815XgQio277wI47sdRh1dUOLdyC6Q= 
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= -github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20210826220005-b48c857c3a0e/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY= github.com/antonmedv/expr v1.9.0 h1:j4HI3NHEdgDnN9p6oI6Ndr0G5QryMY0FNxT4ONrFDGU= github.com/antonmedv/expr v1.9.0/go.mod h1:5qsM3oLGDND7sDmQGDXHkYfkjYMUX14qsgqmHhwGEk8= -github.com/apache/openwhisk-client-go v0.0.0-20190915054138-716c6f973eb2/go.mod h1:jLLKYP7+1+LFlIJW1n9U1gqeveLM1HIwa4ZHNOFxjPw= -github.com/apache/pulsar-client-go v0.1.1/go.mod h1:mlxC65KL1BLhGO2bnT9zWMttVzR2czVPb27D477YpyU= -github.com/ardielle/ardielle-go v1.5.2/go.mod h1:I4hy1n795cUhaVt/ojz83SNVCYIGsAFAONtv2Dr7HUI= -github.com/ardielle/ardielle-tools v1.5.4/go.mod h1:oZN+JRMnqGiIhrzkRN9l26Cej9dEx4jeNG6A+AdkShk= -github.com/argoproj-labs/argo-dataflow v0.10.0 h1:sqMNzjVKXpa653pwwZ4uU3vUoBew8TjchMRhdIYhUuA= -github.com/argoproj-labs/argo-dataflow v0.10.0/go.mod h1:tCCD3s0ub5/PB59TpoKGk2N2XPkFFs8a8Ge8qBK8YjQ= -github.com/argoproj-labs/go-git/v5 v5.4.3 h1:BXJaKSYj5sK2k8LQENV6T7cs+y1uX3BDOPAdzZstkmA= -github.com/argoproj-labs/go-git/v5 v5.4.3/go.mod h1:Lv1K45bcCda9jDMEZCGCVuXSGdBaSGAXUvptnVtaEsA= +github.com/aokoli/goutils v1.0.1/go.mod h1:SijmP0QR8LtwsmDs8Yii5Z/S4trXFGFC2oO5g9DP+DQ= +github.com/argoproj-labs/go-git/v5 v5.4.4 h1:xXa015ZCEElgNMxaevPsK68oAsbjpmNGo9W+os1oeBU= +github.com/argoproj-labs/go-git/v5 v5.4.4/go.mod h1:Lv1K45bcCda9jDMEZCGCVuXSGdBaSGAXUvptnVtaEsA= github.com/argoproj/argo-events v0.17.1-0.20220223155401-ddda8800f9f8 h1:LqF/eUExbdTg7MEHUJt4DfZIg5hJN5lneybM7u7MbWI= github.com/argoproj/argo-events v0.17.1-0.20220223155401-ddda8800f9f8/go.mod h1:AhwDnZwUrrwPgN0CYFMfZQ7liL+G+iL4ujNiLMv2l58= -github.com/argoproj/pkg v0.11.0 h1:kho8cjBRe/K7tFiMfNG7vnF6VBy9+p0idV21f9bbUO4= -github.com/argoproj/pkg v0.11.0/go.mod h1:ra+bQPmbVAoEL+gYSKesuigt4m49i3Qa3mE/xQcjCiA= +github.com/argoproj/pkg v0.13.6 h1:36WPD9MNYECHcO1/R1pj6teYspiK7uMQLCgLGft2abM= +github.com/argoproj/pkg 
v0.13.6/go.mod h1:I698DoJBKuvNFaixh4vFl2C88cNIT1WS7KCbz5ewyF8= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= -github.com/armon/go-metrics v0.0.0-20190430140413-ec5e00d3c878/go.mod h1:3AMJUQhVx52RsWOnlkpikZr01T/yAVN2gn0861vByNg= github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= -github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= -github.com/asaskevich/govalidator v0.0.0-20200108200545-475eaeb16496/go.mod h1:oGkLhpf+kjZl6xBf758TQhh5XrAeiJv/7FRz/2spLIg= -github.com/asaskevich/govalidator v0.0.0-20200428143746-21a406dcc535/go.mod h1:oGkLhpf+kjZl6xBf758TQhh5XrAeiJv/7FRz/2spLIg= -github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= -github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= +github.com/ashanbrown/forbidigo v1.2.0/go.mod h1:vVW7PEdqEFqapJe95xHkTfB1+XvZXBFg8t0sG2FIxmI= +github.com/ashanbrown/makezero v0.0.0-20210520155254-b6261585ddde/go.mod 
h1:oG9Dnez7/ESBqc4EdrdNlryeo7d0KcW1ftXHm7nU/UU= github.com/awalterschulze/gographviz v0.0.0-20200901124122-0eecad45bd71 h1:m3N1Fv5vE5IcxuTOGFGGV0grrVFHV8UY2SV0wSBXAC8= github.com/awalterschulze/gographviz v0.0.0-20200901124122-0eecad45bd71/go.mod h1:/ynarkO/43wP/JM2Okn61e8WFMtdbtA8he7GJxW+SFM= -github.com/aws/aws-sdk-go v1.33.16/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= -github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48= -github.com/aws/aws-sdk-go v1.42.50 h1:FA5pbpkLz2fdnMt+AWyHnNaIA269rqr/sYAe3WKCYN4= -github.com/aws/aws-sdk-go v1.42.50/go.mod h1:OGr6lGMAKGlG9CVrYnWYDKIyb829c6EVBRjxqjmPepc= -github.com/aws/aws-sdk-go-v2 v1.9.0/go.mod h1:cK/D0BBs0b/oWPIcX/Z/obahJK1TT7IPVjy53i/mX/4= -github.com/aws/aws-sdk-go-v2/config v1.7.0/go.mod h1:w9+nMZ7soXCe5nT46Ri354SNhXDQ6v+V5wqDjnZE+GY= -github.com/aws/aws-sdk-go-v2/credentials v1.4.0/go.mod h1:dgGR+Qq7Wjcd4AOAW5Rf5Tnv3+x7ed6kETXyS9WCuAY= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.5.0/go.mod h1:CpNzHK9VEFUCknu50kkB8z58AH2B5DvPP7ea1LHve/Y= -github.com/aws/aws-sdk-go-v2/internal/ini v1.2.2/go.mod h1:BQV0agm+JEhqR+2RT5e1XTFIDcAAV0eW6z2trp+iduw= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.3.0/go.mod h1:v8ygadNyATSm6elwJ/4gzJwcFhri9RqS8skgHKiwXPU= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.3.0/go.mod h1:R1KK+vY8AfalhG1AOu5e35pOD2SdoPKQCFLTvnxiohk= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.6.0/go.mod h1:LKb3cKNQIMh+itGnEpKGcnL/6OIjPZqrtYah1w5f+3o= -github.com/aws/aws-sdk-go-v2/service/s3 v1.14.0/go.mod h1:Qit9H3zjAmF7CLHOkrepE9b2ndX/2l3scstsM5g2jSk= -github.com/aws/aws-sdk-go-v2/service/sso v1.4.0/go.mod h1:+1fpWnL96DL23aXPpMGbsmKe8jLTEfbjuQoA4WS1VaA= -github.com/aws/aws-sdk-go-v2/service/sts v1.7.0/go.mod h1:0qcSMCyASQPN2sk/1KQLQ2Fh6yq8wm0HSDAimPhzCoM= -github.com/aws/smithy-go v1.8.0/go.mod h1:SObp3lf9smib00L/v3U2eAKG8FyQ7iLrJnQiAmR5n+E= -github.com/baiyubin/aliyun-sts-go-sdk 
v0.0.0-20180326062324-cfa1a18b161f h1:ZNv7On9kyUzm7fvRZumSyy/IUiSC7AzL0I1jKKtwooA= -github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f/go.mod h1:AuiFmCCPBSrqvVMvuqFuk0qogytodnVFVSN5CeJB8Gc= -github.com/beefsack/go-rate v0.0.0-20180408011153-efa7637bb9b6/go.mod h1:6YNgTHLutezwnBvyneBbwvB8C82y3dcoOj5EQJIdGXA= -github.com/benbjohnson/clock v1.0.3/go.mod h1:bGMdMPoPVvcYyt1gHDf4J2KE153Yf9BuiUKYMaxlTDM= -github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= +github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0= +github.com/aws/aws-sdk-go v1.23.20/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go v1.25.37/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go v1.36.30/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro= +github.com/aws/aws-sdk-go v1.44.39 h1:pMxYLqnuDidT0ZTDAhYC66fb3W3Yc+oShmfzEL4fTDI= +github.com/aws/aws-sdk-go v1.44.39/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= +github.com/aws/aws-sdk-go-v2 v1.7.1/go.mod h1:L5LuPC1ZgDr2xQS7AmIec/Jlc7O/Y1u2KxJyNVab250= +github.com/aws/aws-sdk-go-v2 v1.14.0/go.mod h1:ZA3Y8V0LrlWj63MQAnRHgKf/5QB//LSZCPNWlWrNGLU= +github.com/aws/aws-sdk-go-v2 v1.16.2 h1:fqlCk6Iy3bnCumtrLz9r3mJ/2gUT0pJ0wLFVIdWh+JA= +github.com/aws/aws-sdk-go-v2 v1.16.2/go.mod h1:ytwTPBG6fXTZLxxeeCCWj2/EMYp/xDUgX+OET6TLNNU= +github.com/aws/aws-sdk-go-v2/config v1.5.0/go.mod h1:RWlPOAW3E3tbtNAqTwvSW54Of/yP3oiZXMI0xfUdjyA= +github.com/aws/aws-sdk-go-v2/config v1.15.3 h1:5AlQD0jhVXlGzwo+VORKiUuogkG7pQcLJNzIzK7eodw= +github.com/aws/aws-sdk-go-v2/config v1.15.3/go.mod h1:9YL3v07Xc/ohTsxFXzan9ZpFpdTOFl4X65BAKYaz8jg= +github.com/aws/aws-sdk-go-v2/credentials v1.3.1/go.mod h1:r0n73xwsIVagq8RsxmZbGSRQFj9As3je72C2WzUIToc= +github.com/aws/aws-sdk-go-v2/credentials v1.11.2 h1:RQQ5fzclAKJyY5TvF+fkjJEwzK4hnxQCLOu5JXzDmQo= +github.com/aws/aws-sdk-go-v2/credentials v1.11.2/go.mod 
h1:j8YsY9TXTm31k4eFhspiQicfXPLZ0gYXA50i4gxPE8g= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.3.0/go.mod h1:2LAuqPx1I6jNfaGDucWfA2zqQCYCOMCDHiCOciALyNw= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.12.3 h1:LWPg5zjHV9oz/myQr4wMs0gi4CjnDN/ILmyZUFYXZsU= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.12.3/go.mod h1:uk1vhHHERfSVCUnqSqz8O48LBYDSC+k6brng09jcMOk= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.5/go.mod h1:2hXc8ooJqF2nAznsbJQIn+7h851/bu8GVC80OVTTqf8= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.9 h1:onz/VaaxZ7Z4V+WIN9Txly9XLTmoOh1oJ8XcAC3pako= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.9/go.mod h1:AnVH5pvai0pAF4lXRq0bmhbes1u9R8wTE+g+183bZNM= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.3.0/go.mod h1:miRSv9l093jX/t/j+mBCaLqFHo9xKYzJ7DGm1BsGoJM= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.3 h1:9stUQR/u2KXU6HkFJYlqnZEjBnbgrVbG6I5HN09xZh0= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.3/go.mod h1:ssOhaLpRlh88H3UmEcsBoVKq309quMvm3Ds8e9d4eJM= +github.com/aws/aws-sdk-go-v2/internal/ini v1.1.1/go.mod h1:Zy8smImhTdOETZqfyn01iNOe0CNggVbPjCajyaz6Gvg= +github.com/aws/aws-sdk-go-v2/internal/ini v1.3.10 h1:by9P+oy3P/CwggN4ClnW2D4oL91QV7pBzBICi1chZvQ= +github.com/aws/aws-sdk-go-v2/internal/ini v1.3.10/go.mod h1:8DcYQcz0+ZJaSxANlHIsbbi6S+zMwjwdDqwW3r9AzaE= +github.com/aws/aws-sdk-go-v2/service/ecr v1.4.1/go.mod h1:FglZcyeiBqcbvyinl+n14aT/EWC7S1MIH+Gan2iizt0= +github.com/aws/aws-sdk-go-v2/service/ecr v1.15.0 h1:lY2Z2sBP+zSbJ6CvvmnFgPcgknoQ0OJV88AwVetRRFk= +github.com/aws/aws-sdk-go-v2/service/ecr v1.15.0/go.mod h1:4zYI85WiYDhFaU1jPFVfkD7HlBcdnITDE3QxDwy4Kus= +github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.4.1/go.mod h1:eD5Eo4drVP2FLTw0G+SMIPWNWvQRGGTtIZR2XeAagoA= +github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.12.0 h1:LsqBpyRofMG6eDs6YGud6FhdGyIyXelAasPOZ6wWLro= +github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.12.0/go.mod h1:IArQ3IBR00FkuraKwudKZZU32OxJfdTdwV+W5iZh3Y4= 
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.2.1/go.mod h1:zceowr5Z1Nh2WVP8bf/3ikB41IZW59E4yIYbg+pC6mw= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.3 h1:Gh1Gpyh01Yvn7ilO/b/hr01WgNpaszfbKMUgqM186xQ= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.3/go.mod h1:wlY6SVjuwvh3TVRpTqdy4I1JpBFLX4UGeKZdWntaocw= +github.com/aws/aws-sdk-go-v2/service/sso v1.3.1/go.mod h1:J3A3RGUvuCZjvSuZEcOpHDnzZP/sKbhDWV2T1EOzFIM= +github.com/aws/aws-sdk-go-v2/service/sso v1.11.3 h1:frW4ikGcxfAEDfmQqWgMLp+F1n4nRo9sF39OcIb5BkQ= +github.com/aws/aws-sdk-go-v2/service/sso v1.11.3/go.mod h1:7UQ/e69kU7LDPtY40OyoHYgRmgfGM4mgsLYtcObdveU= +github.com/aws/aws-sdk-go-v2/service/sts v1.6.0/go.mod h1:q7o0j7d7HrJk/vr9uUt3BVRASvcU7gYZB9PUgPiByXg= +github.com/aws/aws-sdk-go-v2/service/sts v1.16.3 h1:cJGRyzCSVwZC7zZZ1xbx9m32UnrKydRYhOvcD1NYP9Q= +github.com/aws/aws-sdk-go-v2/service/sts v1.16.3/go.mod h1:bfBj0iVmsUyUg4weDB4NxktD9rDGeKSVWnjTnwbx9b8= +github.com/aws/smithy-go v1.6.0/go.mod h1:SObp3lf9smib00L/v3U2eAKG8FyQ7iLrJnQiAmR5n+E= +github.com/aws/smithy-go v1.11.0/go.mod h1:3xHYmszWVx2c0kIwQeEVf9uSm4fYZt67FBJnwub1bgM= +github.com/aws/smithy-go v1.11.2 h1:eG/N+CcUMAvsdffgMvjMKwfyDzIkjM6pfxMJ8Mzc6mE= +github.com/aws/smithy-go v1.11.2/go.mod h1:3xHYmszWVx2c0kIwQeEVf9uSm4fYZt67FBJnwub1bgM= +github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.0.0-20220228164355-396b2034c795 h1:IWeCJzU+IYaO2rVEBlGPTBfe90cmGXFTLdhUFlzDGsY= +github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.0.0-20220228164355-396b2034c795/go.mod h1:8vJsEZ4iRqG+Vx6pKhWK6U00qcj0KC37IsfszMkY6UE= +github.com/beorn7/perks v0.0.0-20160804104726-4c0e84591b9a/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1 
h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA= +github.com/bits-and-blooms/bitset v1.2.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA= github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84= github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= +github.com/bkielbasa/cyclop v1.2.0/go.mod h1:qOI0yy6A7dYC4Zgsa72Ppm9kONl0RoIlPbzot9mhmeI= +github.com/blang/semver v3.1.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= +github.com/blizzy78/varnamelen v0.3.0/go.mod h1:hbwRdBvoBqxk34XyQ6HA0UH3G0/1TKuv5AC4eaBT0Ec= github.com/blushft/go-diagrams v0.0.0-20201006005127-c78c821223d9 h1:mV+hh0rMjzrhg7Jc/GKwpa+y/0BMHGOHdM9yY1GYyFI= github.com/blushft/go-diagrams v0.0.0-20201006005127-c78c821223d9/go.mod h1:nDeXEIaeDV+mAK1gBD3/RJH67DYPC0GdaznWN7sB07s= github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w= -github.com/bmizerany/perks v0.0.0-20141205001514-d9a9656a3a4b/go.mod h1:ac9efd0D1fsDb3EJvhqgXRbFx7bs2wqZ10HQPeU8U/Q= -github.com/bombsimon/logrusr/v2 v2.0.1/go.mod h1:ByVAX+vHdLGAfdroiMg6q0zgq2FODY2lc5YJvzmOJio= -github.com/boynton/repl v0.0.0-20170116235056-348863958e3e/go.mod h1:Crc/GCZ3NXDVCio7Yr0o+SSrytpcFhLmVCIzi0s49t4= -github.com/bradleyfalzon/ghinstallation/v2 v2.0.4/go.mod h1:B40qPqJxWE0jDZgOR1JmaMy+4AY1eBP+IByOvqyAKp0= +github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= +github.com/bombsimon/wsl/v3 v3.3.0/go.mod h1:st10JtZYLE4D5sC7b8xV4zTKZwAQjCH/Hy2Pm1FNZIc= 
+github.com/breml/bidichk v0.1.1/go.mod h1:zbfeitpevDUGI7V91Uzzuwrn4Vls8MoBMrwtt78jmso= +github.com/bshuster-repo/logrus-logstash-hook v0.4.1/go.mod h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk= +github.com/buger/jsonparser v0.0.0-20180808090653-f4dd9f5a6b44/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= +github.com/bugsnag/bugsnag-go v0.0.0-20141110184014-b1d153021fcd/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8= +github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50= +github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE= +github.com/butuzov/ireturn v0.1.1/go.mod h1:Wh6Zl3IMtTpaIKbmwzqi6olnM9ptYQxxVacMsOEFPoc= +github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/certifi/gocertifi v0.0.0-20191021191039-0944d244cd40/go.mod h1:sGbDF6GwGcLpkNXPUTkMRoywsNa/ol15pxFe6ERfguA= -github.com/certifi/gocertifi v0.0.0-20200922220541-2c3bb06c6054/go.mod h1:sGbDF6GwGcLpkNXPUTkMRoywsNa/ol15pxFe6ERfguA= -github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.1.2 h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE= github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/charithe/durationcheck v0.0.9/go.mod h1:SSbRIBVfMjCi/kEB6K65XEA83D6prSM8ap1UCpNKtgg= +github.com/chavacava/garif v0.0.0-20210405164556-e8a0a408d6af/go.mod h1:Qjyv4H3//PWVzTeCezG2b9IRn6myJxJSr4TD/xo6ojU= 
+github.com/checkpoint-restore/go-criu/v4 v4.1.0/go.mod h1:xUQBLp4RLc5zJtWY++yjOoMoB5lihDt7fai+75m+rGw= +github.com/checkpoint-restore/go-criu/v5 v5.0.0/go.mod h1:cfwC0EG7HMUenopBsUf9d89JlCLQIfgVcNsNN0t6T2M= +github.com/chrismellard/docker-credential-acr-env v0.0.0-20220119192733-fe33c00cee21 h1:XlpL9EHrPOBJMLDDOf35/G4t5rGAFNNAZQ3cDcWavtc= +github.com/chrismellard/docker-credential-acr-env v0.0.0-20220119192733-fe33c00cee21/go.mod h1:Zlre/PVxuSI9y6/UV4NwGixQ48RHQDSPiUkofr6rbMU= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/cilium/ebpf v0.0.0-20200110133405-4032b1d8aae3/go.mod h1:MA5e5Lr8slmEg9bt0VpxxWqJlO4iwu3FBdHUzV7wQVg= +github.com/cilium/ebpf v0.0.0-20200702112145-1c8d4c9ef775/go.mod h1:7cR51M8ViRLIdUjrmSXlK9pkrsDlLHbO8jiB8X8JnOc= +github.com/cilium/ebpf v0.2.0/go.mod h1:To2CFviqOWL/M0gIMsvSMlqe7em/l1ALkX1PyjrX2Qs= +github.com/cilium/ebpf v0.4.0/go.mod h1:4tRaxcgiL706VnOzHOdBlY8IEAIdxINsQBcU4xJJXRs= +github.com/cilium/ebpf v0.6.2/go.mod h1:4tRaxcgiL706VnOzHOdBlY8IEAIdxINsQBcU4xJJXRs= github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cloudevents/sdk-go/v2 v2.8.0/go.mod h1:GpCBmUj7DIRiDhVvsK5d6WCbgTWs8DxAWTRtAwQmIXs= -github.com/cloudfoundry/jibber_jabber v0.0.0-20151120183258-bcc4c8345a21/go.mod h1:po7NpZ/QiTKzBKyrsEAxwnTamCoh8uDk/egRpQ7siIc= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go 
v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= @@ -259,66 +334,180 @@ github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cockroachdb/datadriven v0.0.0-20200714090401-bf6692d28da5/go.mod h1:h6jFvWxBdQXxjopDMZyH2UVceIRfR84bdzbkoKrsWNo= -github.com/cockroachdb/errors v1.2.4/go.mod h1:rQD95gz6FARkaKkQXUksEje/d9a6wBJoCr5oaCLELYA= -github.com/cockroachdb/logtags v0.0.0-20190617123548-eb05cc24525f/go.mod h1:i/u985jwjWRlyHXQbwatDASoW0RMlZ/3i9yJHE2xLkI= -github.com/colinmarc/hdfs v1.1.4-0.20180802165501-48eb8d6c34a9/go.mod h1:0DumPviB681UcSuJErAbDIOx6SIaJWj463TymfZG02I= +github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= github.com/colinmarc/hdfs v1.1.4-0.20180805212432-9746310a4d31 h1:ow7T77012NSZVW0uOWoQxz3yj9fHKYeZ4QmNrMtWMbM= github.com/colinmarc/hdfs v1.1.4-0.20180805212432-9746310a4d31/go.mod h1:vSBumefK4HA5uiRSwNP+3ofgrEoScpCS2MMWcWXEuQ4= -github.com/confluentinc/confluent-kafka-go v1.8.2/go.mod h1:u2zNLny2xq+5rWeTQjFHbDzzNuba4P1vo31r9r4uAdg= +github.com/containerd/aufs v0.0.0-20200908144142-dab0cbea06f4/go.mod h1:nukgQABAEopAHvB6j7cnP5zJ+/3aVcE7hCYqvIwAHyE= +github.com/containerd/aufs v0.0.0-20201003224125-76a6863f2989/go.mod h1:AkGGQs9NM2vtYHaUen+NljV0/baGCAPELGm2q9ZXpWU= +github.com/containerd/aufs v0.0.0-20210316121734-20793ff83c97/go.mod h1:kL5kd6KM5TzQjR79jljyi4olc1Vrx6XBlcyj3gNv2PU= +github.com/containerd/aufs v1.0.0/go.mod 
h1:kL5kd6KM5TzQjR79jljyi4olc1Vrx6XBlcyj3gNv2PU= +github.com/containerd/btrfs v0.0.0-20201111183144-404b9149801e/go.mod h1:jg2QkJcsabfHugurUvvPhS3E08Oxiuh5W/g1ybB4e0E= +github.com/containerd/btrfs v0.0.0-20210316141732-918d888fb676/go.mod h1:zMcX3qkXTAi9GI50+0HOeuV8LU2ryCE/V2vG/ZBiTss= +github.com/containerd/btrfs v1.0.0/go.mod h1:zMcX3qkXTAi9GI50+0HOeuV8LU2ryCE/V2vG/ZBiTss= +github.com/containerd/cgroups v0.0.0-20190717030353-c4b9ac5c7601/go.mod h1:X9rLEHIqSf/wfK8NsPqxJmeZgW4pcfzdXITDrUSJ6uI= +github.com/containerd/cgroups v0.0.0-20190919134610-bf292b21730f/go.mod h1:OApqhQ4XNSNC13gXIwDjhOQxjWa/NxkwZXJ1EvqT0ko= +github.com/containerd/cgroups v0.0.0-20200531161412-0dbf7f05ba59/go.mod h1:pA0z1pT8KYB3TCXK/ocprsh7MAkoW8bZVzPdih9snmM= +github.com/containerd/cgroups v0.0.0-20200710171044-318312a37340/go.mod h1:s5q4SojHctfxANBDvMeIaIovkq29IP48TKAxnhYRxvo= +github.com/containerd/cgroups v0.0.0-20200824123100-0b889c03f102/go.mod h1:s5q4SojHctfxANBDvMeIaIovkq29IP48TKAxnhYRxvo= +github.com/containerd/cgroups v0.0.0-20210114181951-8a68de567b68/go.mod h1:ZJeTFisyysqgcCdecO57Dj79RfL0LNeGiFUqLYQRYLE= +github.com/containerd/cgroups v1.0.1/go.mod h1:0SJrPIenamHDcZhEcJMNBB85rHcUsw4f25ZfBiPYRkU= +github.com/containerd/console v0.0.0-20180822173158-c12b1e7919c1/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw= +github.com/containerd/console v0.0.0-20181022165439-0650fd9eeb50/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw= +github.com/containerd/console v0.0.0-20191206165004-02ecf6a7291e/go.mod h1:8Pf4gM6VEbTNRIT26AyyU7hxdQU3MvAvxVI0sc00XBE= +github.com/containerd/console v1.0.1/go.mod h1:XUsP6YE/mKtz6bxc+I8UiKKTP04qjQL4qcS3XoQ5xkw= +github.com/containerd/console v1.0.2/go.mod h1:ytZPjGgY2oeTkAONYafi2kSj0aYggsf8acV1PGKCbzQ= +github.com/containerd/containerd v1.2.10/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.3.0-beta.2.0.20190828155532-0293cbd26c69/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= 
+github.com/containerd/containerd v1.3.0/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.3.1-0.20191213020239-082f7e3aed57/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.3.2/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.4.0-beta.2.0.20200729163537-40b22ef07410/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.4.1/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.4.3/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.4.9/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.5.0-beta.1/go.mod h1:5HfvG1V2FsKesEGQ17k5/T7V960Tmcumvqn8Mc+pCYQ= +github.com/containerd/containerd v1.5.0-beta.3/go.mod h1:/wr9AVtEM7x9c+n0+stptlo/uBBoBORwEx6ardVcmKU= +github.com/containerd/containerd v1.5.0-beta.4/go.mod h1:GmdgZd2zA2GYIBZ0w09ZvgqEq8EfBp/m3lcVZIvPHhI= +github.com/containerd/containerd v1.5.0-rc.0/go.mod h1:V/IXoMqNGgBlabz3tHD2TWDoTJseu1FGOKuoA4nNb2s= +github.com/containerd/containerd v1.5.8/go.mod h1:YdFSv5bTFLpG2HIYmfqDpSYYTDX+mc5qtSuYx1YUb/s= +github.com/containerd/continuity v0.0.0-20190426062206-aaeac12a7ffc/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= +github.com/containerd/continuity v0.0.0-20190815185530-f2a389ac0a02/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= +github.com/containerd/continuity v0.0.0-20191127005431-f65d91d395eb/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= +github.com/containerd/continuity v0.0.0-20200710164510-efbc4488d8fe/go.mod h1:cECdGN1O8G9bgKTlLhuPJimka6Xb/Gg7vYzCTNVxhvo= +github.com/containerd/continuity v0.0.0-20201208142359-180525291bb7/go.mod h1:kR3BEg7bDFaEddKm54WSmrol1fKWDU1nKYkgrcgZT7Y= +github.com/containerd/continuity v0.0.0-20210208174643-50096c924a4e/go.mod h1:EXlVlkqNba9rJe3j7w3Xa924itAMLgZH4UD/Q4PExuQ= 
+github.com/containerd/continuity v0.1.0/go.mod h1:ICJu0PwR54nI0yPEnJ6jcS+J7CZAUXrLh8lPo2knzsM= +github.com/containerd/fifo v0.0.0-20180307165137-3d5202aec260/go.mod h1:ODA38xgv3Kuk8dQz2ZQXpnv/UZZUHUCL7pnLehbXgQI= +github.com/containerd/fifo v0.0.0-20190226154929-a9fb20d87448/go.mod h1:ODA38xgv3Kuk8dQz2ZQXpnv/UZZUHUCL7pnLehbXgQI= +github.com/containerd/fifo v0.0.0-20200410184934-f15a3290365b/go.mod h1:jPQ2IAeZRCYxpS/Cm1495vGFww6ecHmMk1YJH2Q5ln0= +github.com/containerd/fifo v0.0.0-20201026212402-0724c46b320c/go.mod h1:jPQ2IAeZRCYxpS/Cm1495vGFww6ecHmMk1YJH2Q5ln0= +github.com/containerd/fifo v0.0.0-20210316144830-115abcc95a1d/go.mod h1:ocF/ME1SX5b1AOlWi9r677YJmCPSwwWnQ9O123vzpE4= +github.com/containerd/fifo v1.0.0/go.mod h1:ocF/ME1SX5b1AOlWi9r677YJmCPSwwWnQ9O123vzpE4= +github.com/containerd/go-cni v1.0.1/go.mod h1:+vUpYxKvAF72G9i1WoDOiPGRtQpqsNW/ZHtSlv++smU= +github.com/containerd/go-cni v1.0.2/go.mod h1:nrNABBHzu0ZwCug9Ije8hL2xBCYh/pjfMb1aZGrrohk= +github.com/containerd/go-runc v0.0.0-20180907222934-5a6d9f37cfa3/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0= +github.com/containerd/go-runc v0.0.0-20190911050354-e029b79d8cda/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0= +github.com/containerd/go-runc v0.0.0-20200220073739-7016d3ce2328/go.mod h1:PpyHrqVs8FTi9vpyHwPwiNEGaACDxT/N/pLcvMSRA9g= +github.com/containerd/go-runc v0.0.0-20201020171139-16b287bc67d0/go.mod h1:cNU0ZbCgCQVZK4lgG3P+9tn9/PaJNmoDXPpoJhDR+Ok= +github.com/containerd/go-runc v1.0.0/go.mod h1:cNU0ZbCgCQVZK4lgG3P+9tn9/PaJNmoDXPpoJhDR+Ok= +github.com/containerd/imgcrypt v1.0.1/go.mod h1:mdd8cEPW7TPgNG4FpuP3sGBiQ7Yi/zak9TYCG3juvb0= +github.com/containerd/imgcrypt v1.0.4-0.20210301171431-0ae5c75f59ba/go.mod h1:6TNsg0ctmizkrOgXRNQjAPFWpMYRWuiB6dSF4Pfa5SA= +github.com/containerd/imgcrypt v1.1.1-0.20210312161619-7ed62a527887/go.mod h1:5AZJNI6sLHJljKuI9IHnw1pWqo/F0nGDOuR9zgTs7ow= +github.com/containerd/imgcrypt v1.1.1/go.mod h1:xpLnwiQmEUJPvQoAapeb2SNCxz7Xr6PJrXQb0Dpc4ms= +github.com/containerd/nri 
v0.0.0-20201007170849-eb1350a75164/go.mod h1:+2wGSDGFYfE5+So4M5syatU0N0f0LbWpuqyMi4/BE8c= +github.com/containerd/nri v0.0.0-20210316161719-dbaa18c31c14/go.mod h1:lmxnXF6oMkbqs39FiCt1s0R2HSMhcLel9vNL3m4AaeY= +github.com/containerd/nri v0.1.0/go.mod h1:lmxnXF6oMkbqs39FiCt1s0R2HSMhcLel9vNL3m4AaeY= +github.com/containerd/stargz-snapshotter/estargz v0.10.1 h1:hd1EoVjI2Ax8Cr64tdYqnJ4i4pZU49FkEf5kU8KxQng= +github.com/containerd/stargz-snapshotter/estargz v0.10.1/go.mod h1:aE5PCyhFMwR8sbrErO5eM2GcvkyXTTJremG883D4qF0= +github.com/containerd/ttrpc v0.0.0-20190828154514-0e0f228740de/go.mod h1:PvCDdDGpgqzQIzDW1TphrGLssLDZp2GuS+X5DkEJB8o= +github.com/containerd/ttrpc v0.0.0-20190828172938-92c8520ef9f8/go.mod h1:PvCDdDGpgqzQIzDW1TphrGLssLDZp2GuS+X5DkEJB8o= +github.com/containerd/ttrpc v0.0.0-20191028202541-4f1b8fe65a5c/go.mod h1:LPm1u0xBw8r8NOKoOdNMeVHSawSsltak+Ihv+etqsE8= +github.com/containerd/ttrpc v1.0.1/go.mod h1:UAxOpgT9ziI0gJrmKvgcZivgxOp8iFPSk8httJEt98Y= +github.com/containerd/ttrpc v1.0.2/go.mod h1:UAxOpgT9ziI0gJrmKvgcZivgxOp8iFPSk8httJEt98Y= +github.com/containerd/ttrpc v1.1.0/go.mod h1:XX4ZTnoOId4HklF4edwc4DcqskFZuvXB1Evzy5KFQpQ= +github.com/containerd/typeurl v0.0.0-20180627222232-a93fcdb778cd/go.mod h1:Cm3kwCdlkCfMSHURc+r6fwoGH6/F1hH3S4sg0rLFWPc= +github.com/containerd/typeurl v0.0.0-20190911142611-5eb25027c9fd/go.mod h1:GeKYzf2pQcqv7tJ0AoCuuhtnqhva5LNU3U+OyKxxJpk= +github.com/containerd/typeurl v1.0.1/go.mod h1:TB1hUtrpaiO88KEK56ijojHS1+NeF0izUACaJW2mdXg= +github.com/containerd/typeurl v1.0.2/go.mod h1:9trJWW2sRlGub4wZJRTW83VtbOLS6hwcDZXTn6oPz9s= +github.com/containerd/zfs v0.0.0-20200918131355-0a33824f23a2/go.mod h1:8IgZOBdv8fAgXddBT4dBXJPtxyRsejFIpXoklgxgEjw= +github.com/containerd/zfs v0.0.0-20210301145711-11e8f1707f62/go.mod h1:A9zfAbMlQwE+/is6hi0Xw8ktpL+6glmqZYtevJgaB8Y= +github.com/containerd/zfs v0.0.0-20210315114300-dde8f0fda960/go.mod h1:m+m51S1DvAP6r3FcmYCp54bQ34pyOwTieQDNRIRHsFY= +github.com/containerd/zfs v0.0.0-20210324211415-d5c4544f0433/go.mod 
h1:m+m51S1DvAP6r3FcmYCp54bQ34pyOwTieQDNRIRHsFY= +github.com/containerd/zfs v1.0.0/go.mod h1:m+m51S1DvAP6r3FcmYCp54bQ34pyOwTieQDNRIRHsFY= +github.com/containernetworking/cni v0.7.1/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY= +github.com/containernetworking/cni v0.8.0/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY= +github.com/containernetworking/cni v0.8.1/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY= +github.com/containernetworking/plugins v0.8.6/go.mod h1:qnw5mN19D8fIwkqW7oHHYDHVlzhJpcY6TQxn/fUyDDM= +github.com/containernetworking/plugins v0.9.1/go.mod h1:xP/idU2ldlzN6m4p5LmGiwRDjeJr6FLK6vuiUwoH7P8= +github.com/containers/ocicrypt v1.0.1/go.mod h1:MeJDzk1RJHv89LjsH0Sp5KTY3ZYkjXO/C+bKAeWFIrc= +github.com/containers/ocicrypt v1.1.0/go.mod h1:b8AOe0YR67uU8OqfVNcznfFpAzu3rdgUV4GP9qXPfu4= +github.com/containers/ocicrypt v1.1.1/go.mod h1:Dm55fwWm1YZAjYRaJ94z2mfZikIyIN4B0oB3dj3jFxY= github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= +github.com/coreos/go-iptables v0.4.5/go.mod h1:/mVI274lEDI2ns62jHCDnCyBF9Iwsmekav8Dbxlm1MU= +github.com/coreos/go-iptables v0.5.0/go.mod h1:/mVI274lEDI2ns62jHCDnCyBF9Iwsmekav8Dbxlm1MU= github.com/coreos/go-oidc v2.1.0+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc= -github.com/coreos/go-oidc v2.2.1+incompatible h1:mh48q/BqXqgjVHpy2ZY7WnWAbenxRjsz9N1i1YxjHAk= -github.com/coreos/go-oidc v2.2.1+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc= -github.com/coreos/go-oidc/v3 v3.1.0 h1:6avEvcdvTa1qYsOZ6I5PRkSYHzpTNWgKYmaJfaYbrRw= -github.com/coreos/go-oidc/v3 v3.1.0/go.mod h1:rEJ/idjfUyfkBit1eI1fvyr+64/g9dcKpAm8MJMesvo= +github.com/coreos/go-oidc/v3 
v3.4.0 h1:xz7elHb/LDwm/ERpwHd+5nb7wFHL32rsr6bBOgaeu6g= +github.com/coreos/go-oidc/v3 v3.4.0/go.mod h1:eHUXhZtXPQLgEaDrOVTgwbgmz1xGOkJNye6h3zkD2Pw= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd v0.0.0-20161114122254-48702e0da86b/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190620071333-e64a0ec8b42a/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd/v22 v22.0.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk= +github.com/coreos/go-systemd/v22 v22.1.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w= +github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod 
h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/cyphar/filepath-securejoin v0.2.2/go.mod h1:FpkQEhXnPnOthhzymB7CGsFk2G9VLXONKD9G7QGMM+4= +github.com/d2g/dhcp4 v0.0.0-20170904100407-a1d1b6c41b1c/go.mod h1:Ct2BUK8SB0YC1SMSibvLzxjeJLnrYEVLULFNiHY9YfQ= +github.com/d2g/dhcp4client v1.0.0/go.mod h1:j0hNfjhrt2SxUOw55nL0ATM/z4Yt3t2Kd1mW34z5W5s= +github.com/d2g/dhcp4server v0.0.0-20181031114812-7d4a0a7f59a5/go.mod h1:Eo87+Kg/IX2hfWJfwxMzLyuSZyxSoAug2nGa1G2QAi8= +github.com/d2g/hardwareaddr v0.0.0-20190221164911-e7d9fbe030e4/go.mod h1:bMl4RjIciD2oAxI7DmWRx6gbeqrkoLqv3MV0vzNad+I= +github.com/daixiang0/gci v0.2.9/go.mod h1:+4dZ7TISfSmqfAGv59ePaHfNzgGtIkHAhhdKggP1JAc= +github.com/danieljoos/wincred v1.1.0/go.mod h1:XYlo+eRTsVA9aHGp7NGjFkPla4m+DCL7hqDjlFjiygg= github.com/dave/jennifer v1.4.1/go.mod h1:7jEdnm+qBcxl8PC0zyp7vxcpSRnzXSt9r39tpTVGlwA= github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/devigned/tab v0.1.1/go.mod h1:XG9mPq0dFghrYvoBF3xdRrJzSTX1b7IQrvaL9mzjeJY= +github.com/denis-tingajkin/go-header v0.4.2/go.mod h1:eLRHAVXzE5atsKAnNRDB90WHCFFnBUn4RN0nRcs1LJA= +github.com/denverdino/aliyungo v0.0.0-20190125010748-a747050bb1ba/go.mod h1:dV8lFg6daOBZbT6/BDGIz6Y3WFGn8juu6G+CQ6LHtl0= +github.com/dgrijalva/jwt-go v0.0.0-20170104182250-a601269ab70c/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod 
h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8= -github.com/dimfeld/httptreemux v5.0.1+incompatible/go.mod h1:rbUlSV+CCpv/SuqUTP/8Bk2O3LyUV436/yaRGkhP6Z0= -github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U= +github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= +github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= +github.com/dnaeon/go-vcr v1.1.0 h1:ReYa/UBrRyQdant9B4fNHGoCNKw6qh6P0fsdGmZpR7c= +github.com/docker/cli v20.10.12+incompatible h1:lZlz0uzG+GH+c0plStMUdF/qk3ppmgnswpR5EbqzVGA= +github.com/docker/cli v20.10.12+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/distribution v0.0.0-20190905152932-14b96e55d84c/go.mod h1:0+TTO4EOBfRPhZXAeF1Vu+W3hHZ8eLp8PgKVZlcvtFY= +github.com/docker/distribution v2.7.1-0.20190205005809-0d3efadf0154+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/distribution v2.8.0+incompatible h1:l9EaZDICImO1ngI+uTifW+ZYvvz7fKISBAKpg+MbWbY= +github.com/docker/distribution v2.8.0+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/docker v20.10.12+incompatible h1:CEeNmFM0QZIsJCZKMkZx0ZcahTiewkrgiwfYD+dfl1U= +github.com/docker/docker v20.10.12+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker-credential-helpers v0.6.3/go.mod h1:WRaJzqw3CTB9bk10avuGsjVBZsD05qeibJ1/TYlvc0Y= +github.com/docker/docker-credential-helpers v0.6.4 h1:axCks+yV+2MR3/kZhAmy07yC56WZ2Pwu/fKWtKuZB0o= 
+github.com/docker/docker-credential-helpers v0.6.4/go.mod h1:ofX3UI0Gz1TteYBjtgs07O36Pyasyp66D2uKT7H8W1c= +github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= +github.com/docker/go-events v0.0.0-20170721190031-9461782956ad/go.mod h1:Uw6UezgYA44ePAFQYUehOuCzmy5zmg/+nl2ZfMWGkpA= +github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c/go.mod h1:Uw6UezgYA44ePAFQYUehOuCzmy5zmg/+nl2ZfMWGkpA= +github.com/docker/go-metrics v0.0.0-20180209012529-399ea8c73916/go.mod h1:/u0gXw0Gay3ceNrsHubL3BtdOL2fHf93USgMTe0W5dI= +github.com/docker/go-metrics v0.0.1/go.mod h1:cG1hvH2utMXtqgqqYE9plW6lDxS3/5ayHzueweSI3Vw= github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/docker/libtrust v0.0.0-20150114040149-fa567046d9b1/go.mod h1:cyGadeNEkKy96OOhEzfZl+yxihPEzKnqJwvfuSUqbZE= github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM= github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= github.com/doublerebel/bellows v0.0.0-20160303004610-f177d92a03d3 h1:7nllYTGLnq4CqBL27lV6oNfXzM2tJ2mrKF8E+aBXOV0= github.com/doublerebel/bellows v0.0.0-20160303004610-f177d92a03d3/go.mod h1:v/MTKot4he5oRHGirOYGN4/hEOONNnWtDBLAzllSGMw= +github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/eapache/go-resiliency v1.2.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= -github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= -github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= -github.com/eclipse/paho.mqtt.golang v1.2.0/go.mod 
h1:H9keYFcgq3Qr5OUJm/JZI/i6U7joQ8SYLhZwfeOo6Ts= -github.com/eclipse/paho.mqtt.golang v1.3.5/go.mod h1:eTzb4gxwwyWpqBUHGQZ4ABAV7+Jgm1PklsYT/eo8Hcc= github.com/elazarl/goproxy v0.0.0-20170405201442-c4fc26588b6e/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153 h1:yUdfgN0XgIJw7foRItutHYUIhlcKzcSf5vDpdhQAKTc= github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= -github.com/emicklei/go-restful v2.12.0+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= -github.com/emicklei/go-restful v2.15.0+incompatible h1:8KpYO/Xl/ZudZs5RNOEhWMBY4hmzlZhhRd9cu+jrZP4= -github.com/emicklei/go-restful v2.15.0+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= +github.com/emicklei/go-restful/v3 v3.8.0 h1:eCZ8ulSerjdAiaNpF7GxXIE7ZCMo1moN1qX+S609eVw= +github.com/emicklei/go-restful/v3 v3.8.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg= github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o= -github.com/emitter-io/go/v2 v2.0.9/go.mod h1:St++epE1u/6ueCVw47xhu4shpkGNxKRVtkWv4Xi33mg= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= @@ -328,56 +517,57 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.m github.com/envoyproxy/go-control-plane 
v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/go-control-plane v0.10.1/go.mod h1:AY7fTTXNdv/aJ2O5jwpxAPOWUZ7hQAEvzN5Pf27BkQQ= +github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= +github.com/envoyproxy/protoc-gen-validate v0.0.14/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E9/baC+qXE/TeeyBRzgJDws= -github.com/evanphx/json-patch v0.5.2/go.mod h1:ZWS5hhDbVDyob71nXKNL0+PWn6ToqBHMikGIFbs31qQ= +github.com/esimonov/ifshort v1.0.3/go.mod h1:yZqNJUrNn20K8Q9n2CrjTKYyVEmX209Hgu+M1LBpeZE= +github.com/ettle/strcase v0.1.1/go.mod h1:hzDLsPC7/lwKyBOywSHEP89nt2pDgdy+No1NBA9o9VY= github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= +github.com/evanphx/json-patch v4.9.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evanphx/json-patch v4.12.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evanphx/json-patch v5.6.0+incompatible h1:jBYDEEiFBPxA0v50tFdvOzQQTCvpL6mnFh5mB2/l16U= github.com/evanphx/json-patch v5.6.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= -github.com/fasthttp/websocket v1.4.2/go.mod h1:smsv/h4PBEBaU0XDTY5UwJTpZv69fQ0FfcLJr21mA6Y= github.com/fasthttp/websocket v1.4.3-rc.6 h1:omHqsl8j+KXpmzRjF8bmzOSYJ8GnS0E3efi1wYT+niY= github.com/fasthttp/websocket v1.4.3-rc.6/go.mod h1:43W9OM2T8FeXpCWMsBd9Cb7nE2CACNqNvCqQCoty/Lc= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.9.0/go.mod 
h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= -github.com/fatih/color v1.12.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= +github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/structs v1.0.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= +github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= github.com/felixge/httpsnoop v1.0.1/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/felixge/httpsnoop v1.0.2 h1:+nS9g82KMXccJ/wp0zyRW9ZBHFETmMGtkk+2CTTrW4o= -github.com/felixge/httpsnoop v1.0.2/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk= +github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= -github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= -github.com/form3tech-oss/jwt-go v3.2.3+incompatible h1:7ZaBxOI7TMoYBfyA3cQHErNNyAWIKUMIwqxEtgHOs5c= github.com/form3tech-oss/jwt-go v3.2.3+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= -github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= -github.com/frankban/quicktest v1.14.0/go.mod h1:NeW+ay9A/U67EYXNFA1nPE8e/tnQv/09mUdL/ijj8og= +github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= +github.com/frankban/quicktest v1.14.3 
h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/fsnotify/fsnotify v1.5.1 h1:mZcQUHVQUQWoPXXtuf9yuEXKudkV2sx1E06UadKWpgI= github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= -github.com/gavv/httpexpect/v2 v2.2.0/go.mod h1:lnd0TqJLrP+wkJk3SFwtrpSlOAZQ7HaaIFuOYbgqgUM= +github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI= +github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= +github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa/go.mod h1:KnogPXtdwXqoenmZCw6S+25EAm2MkxbG0deNDu4cbSA= +github.com/fullstorydev/grpcurl v1.6.0/go.mod h1:ZQ+ayqbKMJNhzLmbpCiurTVlaK2M/3nqZCxaQ2Ze/sM= +github.com/fzipp/gocyclo v0.3.1/go.mod h1:DJHO6AUmbdqj2ET4Z9iArSuwWgYDRryYt2wASxc7x3E= +github.com/garyburd/redigo v0.0.0-20150301180006-535138d7bcd7/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY= github.com/gavv/httpexpect/v2 v2.3.1 h1:sGLlKMn8AuHS9ztK9Sb7AJ7OxIL8v2PcLdyxfKt1Fo4= github.com/gavv/httpexpect/v2 v2.3.1/go.mod h1:yOE8m/aqFYQDNrgprMeXgq4YynfN9h1NgcE1+1suV64= github.com/gdamore/encoding v1.0.0/go.mod h1:alR0ol34c49FCSBLjhosxzcPHQbf2trDkoo5dl+VrEg= github.com/gdamore/tcell v1.3.0/go.mod h1:Hjvr+Ofd+gLglo7RYKxxnzCBmev3BzsS67MebKS4zMM= github.com/getkin/kin-openapi v0.76.0/go.mod h1:660oXbgy5JFMKreazJaQTw7o+X00qeSyhcnluiMv+Xg= -github.com/getsentry/raven-go v0.2.0/go.mod h1:KungGk8q33+aIAZUIVWZDr2OfAEBsO49PX4NzFV5kcQ= -github.com/gfleury/go-bitbucket-v1 v0.0.0-20210707202713-7d616f7c18ac/go.mod h1:LB3osS9X2JMYmTzcCArHHLrndBAfcVLQAvUddfs+ONs= github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/ghodss/yaml 
v1.0.1-0.20190212211648-25d852aebe32/go.mod h1:GIjDIg/heH5DOkXY3YJ/wNhfHsQHoXGjl8G8amsYQ1I= github.com/gizak/termui/v3 v3.1.0/go.mod h1:bXQEBkJpzxUAKf0+xq9MSWAvWZlE7c+aidmyFlkYTrY= github.com/gliderlabs/ssh v0.2.2 h1:6zsha5zo/TWhRhwqCD3+EarCAgZ2yN28ipRnGPnwkI0= github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= -github.com/globalsign/mgo v0.0.0-20180905125535-1ca0a4f7cbcb/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= -github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= +github.com/go-critic/go-critic v0.6.1/go.mod h1:SdNCfU0yF3UBjtaZGw6586/WocupMOJuiqgom5DsQxM= github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4= github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E= -github.com/go-git/go-billy/v5 v5.1.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0= github.com/go-git/go-billy/v5 v5.3.1 h1:CPiOUAzKtMRvolEKw+bG1PLRpT7D3LIs3/3ey4Aiu34= github.com/go-git/go-billy/v5 v5.3.1/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0= github.com/go-git/go-git-fixtures/v4 v4.3.1 h1:y5z6dd3qi8Hl+stezc8p3JxDkoTRqMAlKnXHuzrfjTQ= @@ -385,169 +575,86 @@ github.com/go-git/go-git-fixtures/v4 v4.3.1/go.mod h1:8LHG1a3SRW71ettAD/jW13h8c6 github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= github.com/go-jose/go-jose/v3 v3.0.0 h1:s6rrhirfEP/CGIoc6p+PZAeogN2SxKav6Wp7+dyMWVo= github.com/go-jose/go-jose/v3 v3.0.0/go.mod h1:RNkWWRld676jZEYoV3+XK8L2ZnNSvIsxFMht0mSX+u8= github.com/go-kit/kit v0.8.0/go.mod 
h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-kit/log v0.2.0/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas= github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= github.com/go-logr/logr v0.4.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= -github.com/go-logr/logr v1.0.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.2.2 h1:ahHml/yUpnlb96Rp8HCvtYVPY8ZYpxq3g7UYchIYwbs= -github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/zapr v1.2.0/go.mod h1:Qa4Bsj2Vb+FAVeAKsLD8RLQ+YRJB8YDmOAKxaBQf7Ro= -github.com/go-openapi/analysis v0.0.0-20180825180245-b006789cd277/go.mod h1:k70tL6pCuVxPJOHXQ+wIac1FUrvNkHolPie/cLEU6hI= -github.com/go-openapi/analysis v0.17.0/go.mod h1:IowGgpVeD0vNm45So8nr+IcQ3pxVtpRoBWb8PVZO0ik= -github.com/go-openapi/analysis v0.18.0/go.mod h1:IowGgpVeD0vNm45So8nr+IcQ3pxVtpRoBWb8PVZO0ik= -github.com/go-openapi/analysis v0.19.2/go.mod h1:3P1osvZa9jKjb8ed2TPng3f0i/UY9snX6gxi44djMjk= -github.com/go-openapi/analysis v0.19.4/go.mod h1:3P1osvZa9jKjb8ed2TPng3f0i/UY9snX6gxi44djMjk= -github.com/go-openapi/analysis v0.19.5/go.mod h1:hkEAkxagaIvIP7VTn8ygJNkd4kAYON2rCu0v0ObL0AU= -github.com/go-openapi/analysis v0.19.10/go.mod 
h1:qmhS3VNFxBlquFJ0RGoDtylO9y4pgTAUNE9AEEMdlJQ= -github.com/go-openapi/analysis v0.19.16/go.mod h1:GLInF007N83Ad3m8a/CbQ5TPzdnGT7workfHwuVjNVk= -github.com/go-openapi/analysis v0.20.0/go.mod h1:BMchjvaHDykmRMsK40iPtvyOfFdMMxlOmQr9FBZk+Og= -github.com/go-openapi/analysis v0.20.1/go.mod h1:BMchjvaHDykmRMsK40iPtvyOfFdMMxlOmQr9FBZk+Og= -github.com/go-openapi/analysis v0.21.2/go.mod h1:HZwRk4RRisyG8vx2Oe6aqeSQcoxRp47Xkp3+K6q+LdY= -github.com/go-openapi/errors v0.17.0/go.mod h1:LcZQpmvG4wyF5j4IhA73wkLFQg+QJXOQHVjmcZxhka0= -github.com/go-openapi/errors v0.18.0/go.mod h1:LcZQpmvG4wyF5j4IhA73wkLFQg+QJXOQHVjmcZxhka0= -github.com/go-openapi/errors v0.19.2/go.mod h1:qX0BLWsyaKfvhluLejVpVNwNRdXZhEbTA4kxxpKBC94= -github.com/go-openapi/errors v0.19.3/go.mod h1:qX0BLWsyaKfvhluLejVpVNwNRdXZhEbTA4kxxpKBC94= -github.com/go-openapi/errors v0.19.6/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= -github.com/go-openapi/errors v0.19.7/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= -github.com/go-openapi/errors v0.19.8/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= -github.com/go-openapi/errors v0.19.9/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= -github.com/go-openapi/errors v0.20.1/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= -github.com/go-openapi/errors v0.20.2/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= -github.com/go-openapi/inflect v0.19.0/go.mod h1:lHpZVlpIQqLyKwJ4N+YSc9hchQy/i12fJykb83CRBH4= +github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= +github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-ole/go-ole v1.2.5/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-openapi/jsonpointer v0.0.0-20160704185906-46af16f9f7b1/go.mod h1:+35s3my2LFTysnkMfxsJBAMHj/DoqoB9knIWoYG/Vk0= -github.com/go-openapi/jsonpointer v0.17.0/go.mod 
h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M= -github.com/go-openapi/jsonpointer v0.18.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M= github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg= github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY= github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg= -github.com/go-openapi/jsonreference v0.17.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I= -github.com/go-openapi/jsonreference v0.18.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I= github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc= github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8= github.com/go-openapi/jsonreference v0.19.5/go.mod h1:RdybgQwPxbL4UEjuAruzK1x3nE69AqPYEJeo/TWfEeg= -github.com/go-openapi/jsonreference v0.19.6 h1:UBIxjkht+AWIgYzCDSv2GN+E/togfwXUJFRTWhl2Jjs= -github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns= -github.com/go-openapi/loads v0.17.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU= -github.com/go-openapi/loads v0.18.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU= -github.com/go-openapi/loads v0.19.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU= -github.com/go-openapi/loads v0.19.2/go.mod h1:QAskZPMX5V0C2gvfkGZzJlINuP7Hx/4+ix5jWFxsNPs= -github.com/go-openapi/loads v0.19.3/go.mod h1:YVfqhUCdahYwR3f3iiwQLhicVRvLlU/WO5WPaZvcvSI= -github.com/go-openapi/loads v0.19.5/go.mod h1:dswLCAdonkRufe/gSUC3gN8nTSaB9uaS2es0x5/IbjY= -github.com/go-openapi/loads v0.19.6/go.mod h1:brCsvE6j8mnbmGBh103PT/QLHfbyDxA4hsKvYBNEGVc= 
-github.com/go-openapi/loads v0.19.7/go.mod h1:brCsvE6j8mnbmGBh103PT/QLHfbyDxA4hsKvYBNEGVc= -github.com/go-openapi/loads v0.20.0/go.mod h1:2LhKquiE513rN5xC6Aan6lYOSddlL8Mp20AW9kpviM4= -github.com/go-openapi/loads v0.20.2/go.mod h1:hTVUotJ+UonAMMZsvakEgmWKgtulweO9vYP2bQYKA/o= -github.com/go-openapi/loads v0.21.0/go.mod h1:rHYve9nZrQ4CJhyeIIFJINGCg1tQpx2yJrrNo8sf1ws= -github.com/go-openapi/runtime v0.0.0-20180920151709-4f900dc2ade9/go.mod h1:6v9a6LTXWQCdL8k1AO3cvqx5OtZY/Y9wKTgaoP6YRfA= -github.com/go-openapi/runtime v0.19.0/go.mod h1:OwNfisksmmaZse4+gpV3Ne9AyMOlP1lt4sK4FXt0O64= -github.com/go-openapi/runtime v0.19.4/go.mod h1:X277bwSUBxVlCYR3r7xgZZGKVvBd/29gLDlFGtJ8NL4= -github.com/go-openapi/runtime v0.19.15/go.mod h1:dhGWCTKRXlAfGnQG0ONViOZpjfg0m2gUt9nTQPQZuoo= -github.com/go-openapi/runtime v0.19.16/go.mod h1:5P9104EJgYcizotuXhEuUrzVc+j1RiSjahULvYmlv98= -github.com/go-openapi/runtime v0.19.24/go.mod h1:Lm9YGCeecBnUUkFTxPC4s1+lwrkJ0pthx8YvyjCfkgk= -github.com/go-openapi/runtime v0.21.1/go.mod h1:aQg+kaIQEn+A2CRSY1TxbM8+sT9g2V3aLc1FbIAnbbs= +github.com/go-openapi/jsonreference v0.20.0 h1:MYlu0sBgChmCfJxxUKZ8g1cPWFOB37YSZqewK7OKeyA= +github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo= github.com/go-openapi/spec v0.0.0-20160808142527-6aced65f8501/go.mod h1:J8+jY1nAiCcj+friV/PDoE1/3eeccG9LYBs0tYvLOWc= -github.com/go-openapi/spec v0.17.0/go.mod h1:XkF/MOi14NmjsfZ8VtAKf8pIlbZzyoTvZsdfssdxcBI= -github.com/go-openapi/spec v0.18.0/go.mod h1:XkF/MOi14NmjsfZ8VtAKf8pIlbZzyoTvZsdfssdxcBI= -github.com/go-openapi/spec v0.19.2/go.mod h1:sCxk3jxKgioEJikev4fgkNmwS+3kuYdJtcsZsD5zxMY= github.com/go-openapi/spec v0.19.3/go.mod h1:FpwSN1ksY1eteniUU7X0N/BgJ7a4WvBFVA8Lj9mJglo= -github.com/go-openapi/spec v0.19.6/go.mod h1:Hm2Jr4jv8G1ciIAo+frC/Ft+rR2kQDh8JHKHb3gWUSk= -github.com/go-openapi/spec v0.19.8/go.mod h1:Hm2Jr4jv8G1ciIAo+frC/Ft+rR2kQDh8JHKHb3gWUSk= -github.com/go-openapi/spec v0.19.15/go.mod 
h1:+81FIL1JwC5P3/Iuuozq3pPE9dXdIEGxFutcFKaVbmU= -github.com/go-openapi/spec v0.20.0/go.mod h1:+81FIL1JwC5P3/Iuuozq3pPE9dXdIEGxFutcFKaVbmU= -github.com/go-openapi/spec v0.20.1/go.mod h1:93x7oh+d+FQsmsieroS4cmR3u0p/ywH649a3qwC9OsQ= -github.com/go-openapi/spec v0.20.3/go.mod h1:gG4F8wdEDN+YPBMVnzE85Rbhf+Th2DTvA9nFPQ5AYEg= -github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I= -github.com/go-openapi/strfmt v0.17.0/go.mod h1:P82hnJI0CXkErkXi8IKjPbNBM6lV6+5pLP5l494TcyU= -github.com/go-openapi/strfmt v0.18.0/go.mod h1:P82hnJI0CXkErkXi8IKjPbNBM6lV6+5pLP5l494TcyU= -github.com/go-openapi/strfmt v0.19.0/go.mod h1:+uW+93UVvGGq2qGaZxdDeJqSAqBqBdl+ZPMF/cC8nDY= -github.com/go-openapi/strfmt v0.19.2/go.mod h1:0yX7dbo8mKIvc3XSKp7MNfxw4JytCfCD6+bY1AVL9LU= -github.com/go-openapi/strfmt v0.19.3/go.mod h1:0yX7dbo8mKIvc3XSKp7MNfxw4JytCfCD6+bY1AVL9LU= -github.com/go-openapi/strfmt v0.19.4/go.mod h1:eftuHTlB/dI8Uq8JJOyRlieZf+WkkxUuk0dgdHXr2Qk= -github.com/go-openapi/strfmt v0.19.5/go.mod h1:eftuHTlB/dI8Uq8JJOyRlieZf+WkkxUuk0dgdHXr2Qk= -github.com/go-openapi/strfmt v0.19.11/go.mod h1:UukAYgTaQfqJuAFlNxxMWNvMYiwiXtLsF2VwmoFtbtc= -github.com/go-openapi/strfmt v0.20.0/go.mod h1:UukAYgTaQfqJuAFlNxxMWNvMYiwiXtLsF2VwmoFtbtc= -github.com/go-openapi/strfmt v0.20.2/go.mod h1:43urheQI9dNtE5lTZQfuFJvjYJKPrxicATpEfZwHUNk= -github.com/go-openapi/strfmt v0.21.0/go.mod h1:ZRQ409bWMj+SOgXofQAGTIo2Ebu72Gs+WaRADcS5iNg= -github.com/go-openapi/strfmt v0.21.1/go.mod h1:I/XVKeLc5+MM5oPNN7P6urMOpuLXEcNrCX/rPGuWb0k= github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I= -github.com/go-openapi/swag v0.17.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg= -github.com/go-openapi/swag v0.18.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg= github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= github.com/go-openapi/swag v0.19.5/go.mod 
h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= -github.com/go-openapi/swag v0.19.7/go.mod h1:ao+8BpOPyKdpQz3AOJfbeEVpLmWAvlT1IfTe5McPyhY= -github.com/go-openapi/swag v0.19.9/go.mod h1:ao+8BpOPyKdpQz3AOJfbeEVpLmWAvlT1IfTe5McPyhY= -github.com/go-openapi/swag v0.19.12/go.mod h1:eFdyEBkTdoAf/9RXBvj4cr1nH7GD8Kzo5HTt47gr72M= -github.com/go-openapi/swag v0.19.13/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= github.com/go-openapi/swag v0.19.14/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyrCM= github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= -github.com/go-openapi/validate v0.18.0/go.mod h1:Uh4HdOzKt19xGIGm1qHf/ofbX1YQ4Y+MYsct2VUrAJ4= -github.com/go-openapi/validate v0.19.2/go.mod h1:1tRCw7m3jtI8eNWEEliiAqUIcBztB2KDnRCRMUi7GTA= -github.com/go-openapi/validate v0.19.3/go.mod h1:90Vh6jjkTn+OT1Eefm0ZixWNFjhtOH7vS9k0lo6zwJo= -github.com/go-openapi/validate v0.19.10/go.mod h1:RKEZTUWDkxKQxN2jDT7ZnZi2bhZlbNMAuKvKB+IaGx8= -github.com/go-openapi/validate v0.19.12/go.mod h1:Rzou8hA/CBw8donlS6WNEUQupNvUZ0waH08tGe6kAQ4= -github.com/go-openapi/validate v0.19.15/go.mod h1:tbn/fdOwYHgrhPBzidZfJC2MIVvs9GA7monOmWBbeCI= -github.com/go-openapi/validate v0.20.1/go.mod h1:b60iJT+xNNLfaQJUqLI7946tYiFEOuE9E4k54HpKcJ0= -github.com/go-openapi/validate v0.20.3/go.mod h1:goDdqVGiigM3jChcrYJxD2joalke3ZXeftD16byIjA4= -github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= -github.com/go-resty/resty/v2 v2.7.0/go.mod h1:9PWDzw47qPphMRFfhsyk0NnSgvluHcljSMVIq3w7q0I= +github.com/go-redis/redis v6.15.8+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= +github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-sql-driver/mysql v1.6.0 
h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-stack/stack v1.8.1/go.mod h1:dcoOX6HbPZSZptuspn9bctJ+N/CnF5gGygcUP3XYfe4= -github.com/go-swagger/go-swagger v0.29.0/go.mod h1:Z4GJzI+bHKKkGB2Ji1rawpi3/ldXX8CkzGIa9HAC5EE= -github.com/go-swagger/scan-repo-boundary v0.0.0-20180623220736-973b3573c013/go.mod h1:b65mBPzqzZWxOZGxSWrqs4GInLIn+u99Q9q7p+GKni0= github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= -github.com/go-test/deep v1.0.4/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0= -github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY= -github.com/gobuffalo/depgen v0.1.0/go.mod h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg= -github.com/gobuffalo/envy v1.6.15/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= -github.com/gobuffalo/envy v1.7.0/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= -github.com/gobuffalo/flect v0.1.0/go.mod h1:d2ehjJqGOH/Kjqcoz+F7jHTBbmDb38yXA598Hb50EGs= -github.com/gobuffalo/flect v0.1.1/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= -github.com/gobuffalo/flect v0.1.3/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= -github.com/gobuffalo/flect v0.2.3/go.mod h1:vmkQwuZYhN5Pc4ljYQZzP+1sq+NEkK+lh20jmEmX3jc= -github.com/gobuffalo/genny v0.0.0-20190329151137-27723ad26ef9/go.mod h1:rWs4Z12d1Zbf19rlsn0nurr75KqhYp52EAGGxTbBhNk= -github.com/gobuffalo/genny v0.0.0-20190403191548-3ca520ef0d9e/go.mod h1:80lIj3kVJWwOrXWWMRzzdhW3DsrdjILVil/SFKBzF28= -github.com/gobuffalo/genny v0.1.0/go.mod h1:XidbUqzak3lHdS//TPu2OgiFB+51Ur5f7CSnXZ/JDvo= -github.com/gobuffalo/genny v0.1.1/go.mod 
h1:5TExbEyY48pfunL4QSXxlDOmdsD44RRq4mVZ0Ex28Xk= -github.com/gobuffalo/gitgen v0.0.0-20190315122116-cc086187d211/go.mod h1:vEHJk/E9DmhejeLeNt7UVvlSGv3ziL+djtTr3yyzcOw= -github.com/gobuffalo/gogen v0.0.0-20190315121717-8f38393713f5/go.mod h1:V9QVDIxsgKNZs6L2IYiGR8datgMhB577vzTDqypH360= -github.com/gobuffalo/gogen v0.1.0/go.mod h1:8NTelM5qd8RZ15VjQTFkAW6qOMx5wBbW4dSCS3BY8gg= -github.com/gobuffalo/gogen v0.1.1/go.mod h1:y8iBtmHmGc4qa3urIyo1shvOD8JftTtfcKi+71xfDNE= -github.com/gobuffalo/logger v0.0.0-20190315122211-86e12af44bc2/go.mod h1:QdxcLw541hSGtBnhUc4gaNIXRjiDppFGaDqzbrBd3v8= -github.com/gobuffalo/mapi v1.0.1/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= -github.com/gobuffalo/mapi v1.0.2/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= -github.com/gobuffalo/packd v0.0.0-20190315124812-a385830c7fc0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= -github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= -github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ= -github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= -github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= -github.com/gobwas/glob v0.2.4-0.20181002190808-e7a84e9525fe/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= +github.com/go-toolsmith/astcast v1.0.0/go.mod h1:mt2OdQTeAQcY4DQgPSArJjHCcOwlX+Wl/kwN+LbLGQ4= +github.com/go-toolsmith/astcopy v1.0.0/go.mod h1:vrgyG+5Bxrnz4MZWPF+pI4R8h3qKRjjyvV/DSez4WVQ= +github.com/go-toolsmith/astequal v1.0.0/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY= +github.com/go-toolsmith/astequal v1.0.1/go.mod h1:4oGA3EZXTVItV/ipGiOx7NWkY5veFfcsOJVS2YxltLw= +github.com/go-toolsmith/astfmt v1.0.0/go.mod h1:cnWmsOAuq4jJY6Ct5YWlVLmcmLMn1JUPuQIHCY7CJDw= +github.com/go-toolsmith/astinfo v0.0.0-20180906194353-9809ff7efb21/go.mod h1:dDStQCHtmZpYOmjRP/8gHHnCCch3Zz3oEgCdZVdtweU= 
+github.com/go-toolsmith/astp v1.0.0/go.mod h1:RSyrtpVlfTFGDYRbrjyWP1pYu//tSFcvdYrA8meBmLI= +github.com/go-toolsmith/pkgload v1.0.0/go.mod h1:5eFArkbO80v7Z0kdngIxsRXRMTaX4Ilcwuh3clNrQJc= +github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= +github.com/go-toolsmith/typep v1.0.0/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU= +github.com/go-toolsmith/typep v1.0.2/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU= +github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= +github.com/godbus/dbus v0.0.0-20151105175453-c7fdd8b5cd55/go.mod h1:/YcGZj5zSblfDWMMoOzV4fas9FZnQYTkDnsGvmh2Grw= +github.com/godbus/dbus v0.0.0-20180201030542-885f9cc04c9c/go.mod h1:/YcGZj5zSblfDWMMoOzV4fas9FZnQYTkDnsGvmh2Grw= +github.com/godbus/dbus v0.0.0-20190422162347-ade71ed3457e/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= +github.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= +github.com/gogo/googleapis v1.2.0/go.mod h1:Njal3psf3qN6dwBtQfUmBZh2ybovJ0tlu3o/AC7HYjU= +github.com/gogo/googleapis v1.4.0/go.mod h1:5YRNX2z1oM5gXdAkurHa942MDgEJyk02w4OecKY87+c= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= +github.com/gogo/protobuf v1.3.0/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= github.com/gogo/protobuf v1.3.2 
h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt v3.2.1+incompatible h1:73Z+4BJcrTC+KczS6WvTPvRGOp1WmfEP4Q1lOd9Z/+c= +github.com/golang-jwt/jwt v3.2.1+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= -github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= +github.com/golang-jwt/jwt/v4 v4.2.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= +github.com/golang-jwt/jwt/v4 v4.3.0 h1:kHL1vqdqWNfATmA0FNMdmZNMyZI1U6O31X4rlIPoBog= +github.com/golang-jwt/jwt/v4 v4.3.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4= github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -565,7 +672,6 @@ github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71 github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v0.0.0-20161109072736-4bd1920723d7/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.0.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.2.0/go.mod 
h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -585,14 +691,24 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2/go.mod h1:k9Qvh+8juN+UKMCS/3jFtGICgW8O96FVaZsaxdzDkR4= +github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk= +github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613/go.mod h1:SyvUF2NxV+sN8upjjeVYr5W7tyxaT1JVtvhKhOn2ii8= +github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a/go.mod h1:9qCChq59u/eW8im404Q2WWTrnBUQKjpNYKMbU4M7EFU= +github.com/golangci/golangci-lint v1.43.0/go.mod h1:VIFlUqidx5ggxDfQagdvd9E67UjMXtTHBkBQ7sHoC5Q= +github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0/go.mod h1:66R6K6P6VWk9I95jvqGxkqJxVWGFy9XlDwLwVz1RCFg= +github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca/go.mod h1:tvlJhZqDe4LMs4ZHD0oMUlt9G2LWuDGoisJTBzLMV9o= +github.com/golangci/misspell v0.3.5/go.mod h1:dEbvlSfYbMQDtrpRMQU675gSDLDNa8sCPPChZ7PhiVA= +github.com/golangci/revgrep v0.0.0-20210930125155-c22e5001d4f2/go.mod h1:LK+zW4MpyytAWQRz0M4xnzEk50lSvqDQKfx304apFkY= +github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4/go.mod h1:Izgrg8RkN3rCIMLGE9CyYmU9pY2Jer6DgANEnZ/L/cQ= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod 
h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA= -github.com/google/cel-go v0.9.0/go.mod h1:U7ayypeSkw23szu4GaQTPJGx66c20mx8JklMSxrmI1w= -github.com/google/cel-spec v0.6.0/go.mod h1:Nwjgxy5CbjlPrtCWjeDjUyKMl8w41YBYGjsyDdqk0xA= +github.com/google/certificate-transparency-go v1.0.21/go.mod h1:QeJfpSbVSfYc7RgB3gJFj9cbuQMMchQxrWXz8Ruopmg= +github.com/google/certificate-transparency-go v1.1.1/go.mod h1:FDKqPvSXawb2ecErVRrD+nfy23RCzyl7eqVCEmlT1Zs= +github.com/google/gnostic v0.5.7-v3refs h1:FhTMOKj2VhjpouxvWJAV1TL304uMlb9zcDqkl6cEI54= +github.com/google/gnostic v0.5.7-v3refs/go.mod h1:73MKFl6jIHelAJNaBGFzt3SPtZULs9dYrGFt8OiIsHQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= @@ -605,10 +721,15 @@ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= -github.com/google/go-github/v31 v31.0.0/go.mod h1:NQPZol8/1sMoWYGN2yaALIBytu17gAWfhbweiEed3pM= -github.com/google/go-github/v41 v41.0.0/go.mod h1:XgmCA5H323A9rtgExdTcnDkcqp6S30AVACCBDOonIxg= +github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-containerregistry 
v0.8.1-0.20220110151055-a61fd0a8e2bb h1:hdevkgIzFpx/Xbz+L2JB+UrmglBf0ZSBZo0tkzzh26s= +github.com/google/go-containerregistry v0.8.1-0.20220110151055-a61fd0a8e2bb/go.mod h1:wW5v71NHGnQyb4k+gSshjxidrC7lN33MdWEn+Mz9TsI= +github.com/google/go-containerregistry/pkg/authn/k8schain v0.0.0-20220411142604-2042cc9d6401 h1:ojEm+gWlwYBNWfpQeIizE+6zy6GfghjnE0o+y7JQhGg= +github.com/google/go-containerregistry/pkg/authn/k8schain v0.0.0-20220411142604-2042cc9d6401/go.mod h1:gm/Zjh0iiPBfwgDIYgHJCRxaGzBZu1njCgwX1EmC1Tw= +github.com/google/go-containerregistry/pkg/authn/kubernetes v0.0.0-20220301182634-bfe2ffc6b6bd h1:DVnBwEU/77+h/Celwk8RLwnYiuaIdBv2gguN3xHfcJM= +github.com/google/go-containerregistry/pkg/authn/kubernetes v0.0.0-20220301182634-bfe2ffc6b6bd/go.mod h1:MO/Ilc3XTxy/Pi8aMXEiRUl6icOqResFyhSFCLlqtR8= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= @@ -628,6 +749,7 @@ github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200507031123-427632fa3b1c/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= @@ -638,70 +760,92 
@@ github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/trillian v1.3.11/go.mod h1:0tPraVHrSDkA3BO6vKX67zgLXs6SsOAbHEivX+9mPgw= +github.com/google/uuid v0.0.0-20161128191214-064e2069ce9c/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.0.0-20220520183353-fd19c99a87aa/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= +github.com/googleapis/enterprise-certificate-proxy v0.1.0 h1:zO8WHNx/MYiAKJ3d5spxZXZE6KHmIQGQcAzwUzV7qQw= +github.com/googleapis/enterprise-certificate-proxy v0.1.0/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= -github.com/googleapis/gax-go/v2 v2.1.1 h1:dp3bWCh+PPO1zjRRiCSczJav13sBvG4UhNyVTa1KqdU= github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= +github.com/googleapis/gax-go/v2 v2.2.0/go.mod 
h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM= +github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM= +github.com/googleapis/gax-go/v2 v2.4.0 h1:dS9eYAjhrE2RjmzYw2XAPvcXfmcQLtFEQWn0CR82awk= +github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK9wbMD5+iXC6c= github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= -github.com/googleapis/gnostic v0.5.1/go.mod h1:6U4PtQXGIEt/Z3h5MAT7FNofLnw9vXk2cUuW7uA/OeU= -github.com/googleapis/gnostic v0.5.5 h1:9fHAtK0uDfpveeqqo1hkEZJcFvYXAiCN3UutL8F9xHw= -github.com/googleapis/gnostic v0.5.5/go.mod h1:7+EbHbldMins07ALC74bsA81Ovc97DwqyJO1AENw9kA= +github.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3ir6b65WBswg= +github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= +github.com/gookit/color v1.4.2/go.mod h1:fqRyamkC1W8uxl+lxCQxOT09l/vYfZ+QeiX3rKQHCoQ= github.com/gophercloud/gophercloud v0.1.0/go.mod h1:vxM41WHh5uqHVBMZHzuwNOHh8XEoIEcSTewFxm1c5g8= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= +github.com/gordonklaus/ineffassign v0.0.0-20200309095847-7953dde2c7bf/go.mod h1:cuNKsD1zp2v6XfE/orVX2QE1LC+i254ceGcVeDT3pTU= +github.com/gordonklaus/ineffassign v0.0.0-20210225214923-2e10b2664254/go.mod h1:M9mZEtGIsR1oDaZagNPNG9iq9n2HrhZ17dsXk73V3Lw= +github.com/gorhill/cronexpr v0.0.0-20180427100037-88b0669f7d75/go.mod h1:g2644b03hfBX9Ov0ZBDgXXens4rxSxmqFBbhvKv2yVA= +github.com/gorilla/handlers 
v0.0.0-20150720190736-60c7bfde3e33/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ= github.com/gorilla/handlers v1.5.1 h1:9lRY6j8DEeeBT10CvO9hGW0gmky0BprnvDI5vfhUHH4= github.com/gorilla/handlers v1.5.1/go.mod h1:t8XrUpc4KVXb7HGyJ4/cEnwQiaxrX/hz1Zv/4g96P1Q= -github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.7.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= -github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= -github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= -github.com/gorilla/websocket v1.0.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gostaticanalysis/analysisutil v0.0.0-20190318220348-4088753ea4d3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= +github.com/gostaticanalysis/analysisutil v0.0.3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= +github.com/gostaticanalysis/analysisutil v0.1.0/go.mod h1:dMhHRU9KTiDcuLGdy87/2gTR8WruwYZrKdRq9m1O6uw= +github.com/gostaticanalysis/analysisutil v0.4.1/go.mod h1:18U/DLpRgIUd459wGxVHE0fRgmo1UgHDcbw7F5idXu0= +github.com/gostaticanalysis/analysisutil v0.7.1/go.mod h1:v21E3hY37WKMGSnbsw2S/ojApNWb6C1//mXO48CXbVc= +github.com/gostaticanalysis/comment v1.3.0/go.mod 
h1:xMicKDx7XRXYdVwY9f9wQpDJVnqWxw9wCauCMKp+IBI= +github.com/gostaticanalysis/comment v1.4.1/go.mod h1:ih6ZxzTHLdadaiSnF5WY3dxUoXfXAlTaRzuaNDlSado= +github.com/gostaticanalysis/comment v1.4.2/go.mod h1:KLUTGDv6HOCotCH8h2erHKmpci2ZoR8VPu34YA2uzdM= +github.com/gostaticanalysis/forcetypeassert v0.0.0-20200621232751-01d4955beaa5/go.mod h1:qZEedyP/sY1lTGV1uJ3VhWZ2mqag3IkWsDHVbplHXak= +github.com/gostaticanalysis/nilerr v0.1.1/go.mod h1:wZYb6YI5YAxxq0i1+VJbY0s2YONW0HU0GPE3+5PWN4A= +github.com/gostaticanalysis/testutil v0.3.1-0.20210208050101-bfb5c8eec0e4/go.mod h1:D+FIZ+7OahH3ePw/izIEeH5I06eKs1IKI4Xr64/Am3M= +github.com/gostaticanalysis/testutil v0.4.0/go.mod h1:bLIoPefWXrRi/ssLFWX1dx7Repi5x3CuviD3dgAZaBU= github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= +github.com/gregjones/httpcache v0.0.0-20190611155906-901d90724c79/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-middleware v1.2.2/go.mod h1:EaizFBKfUKtMIF5iaDEhniwNedqGo9FuLFzppDr3uwI= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= -github.com/grpc-ecosystem/grpc-gateway v1.14.6/go.mod h1:zdiPV4Yse/1gnckTHtghG4GkDEdKCRJduHpTxT3/jcw= +github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod 
h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/grpc-ecosystem/grpc-gateway v1.12.1/go.mod h1:8XEsbTttt/W+VvjtQhLACqCisSPWTxCZ7sBRjU6iH9c= github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= +github.com/hashicorp/consul/api v1.10.1/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= github.com/hashicorp/consul/api v1.11.0/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= -github.com/hashicorp/consul/api v1.12.0/go.mod h1:6pVBMo0ebnYdt2S3H87XhekM/HHrUoTD2XXb/VrZVy0= github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= +github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= -github.com/hashicorp/go-hclog v0.9.1/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= -github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= -github.com/hashicorp/go-hclog v1.1.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= 
github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= -github.com/hashicorp/go-msgpack v0.5.5/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= -github.com/hashicorp/go-msgpack v1.1.5/go.mod h1:gWVc3sv/wbDmR3rQsj1CAktEZzoz1YNK9NfGLXJ69/4= +github.com/hashicorp/go-multierror v0.0.0-20161216184304-ed905158d874/go.mod h1:JMRHfdO9jKNzS/+BTlxCjKNQHg/jZAft8U7LloJvN7I= github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= -github.com/hashicorp/go-retryablehttp v0.6.8/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= @@ -711,6 +855,7 @@ github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/b github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.2 h1:cfejS+Tpcp13yd5nYHWDI6qVCny6wyX2Mt5SGur2IGE= github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod 
h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= @@ -724,12 +869,12 @@ github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/ github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= -github.com/hashicorp/raft v1.3.3/go.mod h1:4Ak7FSPnuvmb0GV6vgIAJ4vYT4bek9bb6Q+7HVbyzqM= github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk= github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= -github.com/hokaccha/go-prettyjson v0.0.0-20190818114111-108c894c2c0e/go.mod h1:pFlLw2CfqZiIBOx6BuCeRLCrfxBJipTY0nIOF/VbGcI= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo= +github.com/huandu/xstrings v1.2.0/go.mod h1:DvyZB1rfVYsBIigL8HwpZgxHwXozlTgGqn63UyNX5k4= github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/huandu/xstrings v1.3.2 h1:L18LIDzqlW6xN2rEkpdV8+oL/IXWJ1APd+vsdYy4Wdw= github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= @@ -737,7 +882,10 @@ github.com/iancoleman/strcase v0.1.1/go.mod h1:SK73tn/9oHe+/Y0h39VT4UCxmurVJkR5N github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/imdario/mergo v0.3.4/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo 
v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= +github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= +github.com/imdario/mergo v0.3.10/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU= github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= @@ -746,36 +894,33 @@ github.com/imkira/go-interpol v1.1.0 h1:KIiKr0VSG2CUW1hl1jpiyuzuJeKUUpC8iM1AIE7N github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA= github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/itchyny/gojq v0.12.6/go.mod h1:ZHrkfu7A+RbZLy5J1/JKpS4poEqrzItSTGDItqsfP0A= -github.com/itchyny/timefmt-go v0.1.3/go.mod h1:0osSSCQSASBJMsIZnhAaF1C2fCBTJZXrnj37mG8/c+A= -github.com/jawher/mow.cli v1.0.4/go.mod h1:5hQj2V8g+qYmLUVWqu4Wuja1pI57M83EChYLVZ0sMKk= -github.com/jawher/mow.cli v1.1.0/go.mod h1:aNaQlc7ozF3vw6IJ2dHjp2ZFiA4ozMIYY6PyuRJwlUg= +github.com/j-keck/arping v0.0.0-20160618110441-2cf9dc699c56/go.mod h1:ymszkNOg6tORTn+6F6j+Jc8TOr5osrynvN6ivFWZ2GA= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= -github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs= -github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM= github.com/jcmturner/gofork v0.0.0-20180107083740-2aebee971930/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o= github.com/jcmturner/gofork v1.0.0 h1:J7uCkflzTEhUZ64xqKnkDxq3kzc96ajM1Gli5ktUem8= 
github.com/jcmturner/gofork v1.0.0/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o= -github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg= -github.com/jcmturner/gokrb5/v8 v8.4.2/go.mod h1:sb+Xq/fTY5yktf/VxLsE3wlfPqQjp0aWNYyvBVK62bc= -github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc= -github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4= -github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik= +github.com/jgautheron/goconst v1.5.1/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4= +github.com/jhump/protoreflect v1.6.1/go.mod h1:RZQ/lnuN+zqeRVpQigTwO6o0AJUkxbnSnpuG7toUTG4= +github.com/jingyugao/rowserrcheck v1.1.1/go.mod h1:4yvlZSDb3IyDTUZJUmpZfm2Hwok+Dtp+nu2qOq+er9c= +github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0= +github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmespath/go-jmespath v0.0.0-20160803190731-bd40a432e4c7/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= -github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= +github.com/jmoiron/sqlx v1.2.0/go.mod 
h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= -github.com/jonboulle/clockwork v0.2.2/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8= -github.com/joncalhoun/qson v0.0.0-20200422171543-84433dcd3da0/go.mod h1:DFXrEwSRX0p/aSvxE21319menCBFeQO0jXpRj7LEZUA= +github.com/jonboulle/clockwork v0.2.0/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= -github.com/jpillora/backoff v0.0.0-20180909062703-3050d21c67d7/go.mod h1:2iMrUgbbvHEiQClaW2NsSzMyGHqN+rDFqY705q49KG0= +github.com/josharian/txtarfs v0.0.0-20210218200122-0702f000015a/go.mod h1:izVPOvVRsHiKkeGCT6tYBNWyDVuzj9wAaBb5R9qamfw= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= @@ -785,34 +930,33 @@ github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHm github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= -github.com/juju/fslock v0.0.0-20160525022230-4d5c94c67b4b/go.mod h1:HMcgvsgd0Fjj4XXDkbjdmlbI505rUPBs6WBMYg2pXks= +github.com/juju/ratelimit v1.0.1/go.mod 
h1:qapgC/Gy+xNh9UxzV13HGGl/6UXNN+ct+vwSgWNm/qk= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= -github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= +github.com/julz/importas v0.0.0-20210419104244-841f0c0fe66d/go.mod h1:oSFU2R4XK/P7kNBrnL/FEQlDGN1/6WoxXEjSSXO0DV0= github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= -github.com/k0kubun/pp v2.3.0+incompatible/go.mod h1:GWse8YhT0p8pT4ir3ZgBbfZild3tgzSScAn6HmfYukg= github.com/karrick/godirwalk v1.7.8/go.mod h1:2c9FRhkDxdIbgkOnCEvnSWs71Bhugbl46shStcFDJ34= -github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4= -github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA= github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 h1:DowS9hvgyYSX4TO5NpyC606/Z4SxnNYbT+WX27or6Ck= github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/errcheck v1.6.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.8.2/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/compress v1.10.8/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.11.3/go.mod 
h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.11.13/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.12.2/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= github.com/klauspost/compress v1.13.4/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= github.com/klauspost/compress v1.13.5/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= -github.com/klauspost/compress v1.14.2 h1:S0OHlFk/Gbon/yauFJ4FfJJF5V0fc5HbBTJazi28pRw= -github.com/klauspost/compress v1.14.2/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= -github.com/klauspost/cpuid v1.2.1/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= +github.com/klauspost/compress v1.15.9 h1:wKRjX6JRtDdrE9qwa4b/Cip7ACOshUI4smpCQanqjSY= +github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= github.com/klauspost/cpuid v1.2.3/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= -github.com/klauspost/cpuid v1.3.1 h1:5JNjFYYQrZeKRJ0734q51WCEEn2huer72Dc7K+R/b6s= github.com/klauspost/cpuid v1.3.1/go.mod h1:bYW4mA6ZgKPob1/Dlai2LviZJO7KGI3uoWLd42rAQw4= +github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.1.0 h1:eyi1Ad2aNJMW95zcSbmGg7Cg6cq3ADwLpMAP96d8rF0= +github.com/klauspost/cpuid/v2 v2.1.0/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= github.com/klauspost/pgzip v1.2.5 h1:qnWYvvKqedOF2ulHpMG72XQol4ILEJ8k2wwRl/Km8oE= github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= @@ -824,43 +968,58 @@ github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORN 
github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= -github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/ktrysmt/go-bitbucket v0.9.32/go.mod h1:FWxy2UK7GlK5b0NSJGc5hPqnssVlkNnsChvyuOf/Xno= +github.com/kulti/thelper v0.4.0/go.mod h1:vMu2Cizjy/grP+jmsvOFDx1kYP6+PD1lqg4Yu5exl2U= +github.com/kunwardeep/paralleltest v1.0.3/go.mod h1:vLydzomDFpk7yu5UX02RmP0H8QfRPOV/oFhWN85Mjb4= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/kyoh86/exportloopref v0.1.8/go.mod h1:1tUcJeiioIs7VWe5gcOObrux3lb66+sBqGZrRkMwPgg= github.com/labstack/echo v3.2.1+incompatible/go.mod h1:0INS7j/VjnFxD4E2wkz67b8cVwCLbBmJyDaka6Cmk1s= github.com/labstack/gommon v0.2.7/go.mod h1:/tj9csK2iPSBvn+3NLM9e52usepMtrd5ilFYA+wQNJ4= +github.com/ldez/gomoddirectives v0.2.2/go.mod h1:cpgBogWITnCfRq2qGoDkKMEVSaarhdBr6g8G04uz6d0= +github.com/ldez/tagliatelle v0.2.0/go.mod h1:8s6WJQwEYHbKZDsp/LjArytKOG8qaMrKQQ3mFukHs88= +github.com/letsencrypt/pkcs11key/v4 v4.0.0/go.mod h1:EFUvBDay26dErnNb70Nd0/VW3tJiIbETBPTl9ATXQag= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.9.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= 
+github.com/lib/pq v1.10.3/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.4 h1:SO9z7FRPzA03QhHKJrH5BXA6HU1rS4V2nIVrrNC1iYk= github.com/lib/pq v1.10.4/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= github.com/lucasb-eyer/go-colorful v1.0.2/go.mod h1:0MS4r+7BZKSJ5mw4/S5MPN+qHFF1fYclkSPilDOKW0s= github.com/lucasb-eyer/go-colorful v1.0.3/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= -github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls= +github.com/magiconair/properties v1.8.4/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= +github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/mailru/easyjson v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= -github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= -github.com/mailru/easyjson v0.0.0-20190312143242-1de009706dbe/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod 
h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= -github.com/mailru/easyjson v0.7.1/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs= +github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs= github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= -github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= -github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= -github.com/matryer/is v1.2.0 h1:92UTHpy8CDwaJ08GqLDzhhuixiBUUD1p3AU6PHddz4A= +github.com/maratori/testpackage v1.0.1/go.mod h1:ddKdw+XG0Phzhx8BFDTKgpWP4i7MpApTE5fXSKAqwDU= +github.com/marstr/guid v1.1.0/go.mod h1:74gB1z2wpxxInTG6yaqA7KrtM0NZ+RbrcqDvYHefzho= +github.com/matoous/godox v0.0.0-20210227103229-6504466cf951/go.mod h1:1BELzlh859Sh1c6+90blK8lbYy0kwQf1bYlBhBysy1s= github.com/matryer/is v1.2.0/go.mod h1:2fLPjFQM9rhQ15aVEtbuwhJinnOqrmgXPNdZsdwlWXA= +github.com/matryer/is v1.4.0 h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE= +github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-colorable 
v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= @@ -874,24 +1033,34 @@ github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27k github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-runewidth v0.0.3/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/mattn/go-runewidth v0.0.6/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-runewidth v0.0.8/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= +github.com/mattn/go-shellwords v1.0.3/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= +github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= +github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 h1:I0XW9+e1XWDxdcEniV4rQAIOPUGDq67JSCiRCgGCZLI= github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= +github.com/mbilski/exhaustivestruct v1.2.0/go.mod h1:OeTBVxQWoEmB2J2JCHmXWPJ0aksxSUOUy+nvtVEfzXc= +github.com/mgechev/dots v0.0.0-20210922191527-e955255bf517/go.mod h1:KQ7+USdGKfpPjXk4Ga+5XxQM4Lm4e3gAogrreFAYpOg= +github.com/mgechev/revive v1.1.2/go.mod h1:bnXsMr+ZTH09V5rssEI+jHAZ4z+ZdyhgO/zsy3EhK+0= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/miekg/dns v1.1.26/go.mod 
h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= +github.com/miekg/dns v1.1.35/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM= github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= -github.com/minio/highwayhash v1.0.1/go.mod h1:BQskDq+xkJ12lmlUUi7U0M5Swg3EWR+dLTk+kldvVxY= -github.com/minio/highwayhash v1.0.2/go.mod h1:BQskDq+xkJ12lmlUUi7U0M5Swg3EWR+dLTk+kldvVxY= -github.com/minio/md5-simd v1.1.0 h1:QPfiOqlZH+Cj9teu0t9b1nTBfPbyTl16Of5MeuShdK4= +github.com/miekg/pkcs11 v1.0.2/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= +github.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= github.com/minio/md5-simd v1.1.0/go.mod h1:XpBqgZULrMYD3R+M28PcmP0CkI7PEMzB3U77ZrKZ0Gw= -github.com/minio/minio-go/v7 v7.0.2/go.mod h1:dJ80Mv2HeGkYLH1sqS/ksz07ON6csH3S6JUMSQ2zAns= -github.com/minio/minio-go/v7 v7.0.15/go.mod h1:pUV0Pc+hPd1nccgmzQF/EXh48l/Z/yps6QPF1aaie4g= -github.com/minio/minio-go/v7 v7.0.23 h1:NleyGQvAn9VQMU+YHVrgV4CX+EPtxPt/78lHOOTncy4= -github.com/minio/minio-go/v7 v7.0.23/go.mod h1:ei5JjmxwHaMrgsMrn4U/+Nmg+d8MKS1U2DAn1ou4+Do= -github.com/minio/sha256-simd v0.1.1 h1:5QHSlgo3nt5yKOJrC7W8w7X+NFl8cMPZm96iu8kKUJU= +github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34= +github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM= +github.com/minio/minio-go/v7 v7.0.29/go.mod h1:x81+AX5gHSfCSqw7jxRKHvxUXMlE5uKX0Vb75Xk5yYg= +github.com/minio/minio-go/v7 v7.0.36 h1:KPzAl8C6jcRFEUsGUHR6deRivvKATPNZThzi7D9y/sc= +github.com/minio/minio-go/v7 v7.0.36/go.mod h1:nCrRzjoSUQh8hgKKtu3Y708OLvRLtuASMg2/nvmbarw= github.com/minio/sha256-simd v0.1.1/go.mod h1:B5e1o+1/KgNmWrSQK08Y6Z1Vb5pwIktudl0J58iy0KM= +github.com/minio/sha256-simd v1.0.0 h1:v1ta+49hkWZyvaKwrQB8elexRqm6Y0aMLjCNsrYxo6g= +github.com/minio/sha256-simd v1.0.0/go.mod h1:OuYzVNI5vcoYIAmbIvHPl3N3jUzVedXbKy5RFepssQM= +github.com/mistifyio/go-zfs v2.1.2-0.20190413222219-f784269be439+incompatible/go.mod 
h1:8AuVvqP/mXw1px98n46wfvcGfQ4ci2FwoAjKYxuo3Z4= github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= @@ -900,29 +1069,33 @@ github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HK github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-ps v0.0.0-20190716172923-621e5597135b h1:9+ke9YJ9KGWw5ANXK6ozjoK47uI3uNbXv4YVINBnGm8= -github.com/mitchellh/go-ps v0.0.0-20190716172923-621e5597135b/go.mod h1:r1VsdOzOPt1ZSrGZWFoNhsAedKnEd6r9Np1+5blZCWk= +github.com/mitchellh/go-ps v1.0.0/go.mod h1:J4lOc8z8yJs6vUwklHw2XEIiT4z4C40KtWVN3nvg8Pg= github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v0.0.0-20180220230111-00c29f56e238/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.3.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure 
v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/mapstructure v1.4.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs= +github.com/mitchellh/mapstructure v1.4.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/osext v0.0.0-20151018003038-5e2d6d41470f/go.mod h1:OkQIRizQZAeMln+1tSwduZz7+Af5oFlKirV/MSYes2A= github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.1/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc= github.com/moby/spdystream v0.2.0 h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8= github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= -github.com/moby/term v0.0.0-20210610120745-9d4ed1856297/go.mod h1:vgPCkQMyxTZ7IDy8SXRufE172gr8+K/JE/7hHFxHW3A= +github.com/moby/sys/mountinfo v0.4.0/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A= +github.com/moby/sys/mountinfo v0.4.1/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A= +github.com/moby/sys/symlink v0.1.0/go.mod h1:GGDODQmbFOjFsXvfLVn3+ZRxkch54RkSiGqsZeMYowQ= +github.com/moby/term v0.0.0-20200312100748-672ec06f55cd/go.mod h1:DdlQx2hp0Ss5/fLikoLlEeIYiATotOjgB//nb973jeo= +github.com/moby/term 
v0.0.0-20201216013528-df9cb8a40635/go.mod h1:FBS0z0QWA44HXygs7VXDUOGoN/1TV3RuWkLO04am3wc= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -930,69 +1103,110 @@ github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lN github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= -github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= +github.com/montanaflynn/stats v0.6.6/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= +github.com/moricho/tparallel v0.2.1/go.mod h1:fXEIZxG2vdfl0ZF8b42f5a78EhjjD5mX8qUplsoSU4k= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/mozilla/scribe v0.0.0-20180711195314-fb71baf557c1/go.mod h1:FIczTrinKo8VaLxe6PWTPEXRXDIHz2QAwiaBaP5/4a8= +github.com/mozilla/tls-observatory v0.0.0-20210609171429-7bc42856d2e5/go.mod h1:FUqVoUPHSEdDR0MnFM3Dh8AU0pZHLXUD127SAJGER/s= +github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ= github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod 
h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-proto-validators v0.0.0-20180403085117-0950a7990007/go.mod h1:m2XC9Qq0AlmmVksL6FktJCdTYyLk7V3fKyp0sl1yWQo= +github.com/mwitkow/go-proto-validators v0.2.0/go.mod h1:ZfA1hW+UH/2ZHOWvQ3HnQaU0DtnpXu850MZiy+YUgcc= github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw= -github.com/nats-io/gnatsd v1.4.1/go.mod h1:nqco77VO78hLCJpIcVfygDP2rPGfsEHkGTUk94uh5DQ= -github.com/nats-io/go-nats v1.7.2/go.mod h1:+t7RHT5ApZebkrQdnn6AhQJmhJJiKAvJUio1PiiCtj0= -github.com/nats-io/graft v0.0.0-20200605173148-348798afea05/go.mod h1:idnzXeCwCx69FMg+R0DyD4/OhrF1A+v3BqF5xSz+tS4= -github.com/nats-io/jwt v0.3.2/go.mod h1:/euKqTS1ZD+zzjYrY7pseZrTtWQSjujC7xjPc8wL6eU= -github.com/nats-io/jwt/v2 v2.2.1-0.20220113022732-58e87895b296/go.mod h1:0tqz9Hlu6bCBFLWAASKhE5vUA4c24L9KPUUgvwumE/k= -github.com/nats-io/nats-server/v2 v2.1.7/go.mod h1:rbRrRE/Iv93O/rUvZ9dh4NfT0Cm9HWjW/BqOWLGgYiE= -github.com/nats-io/nats-server/v2 v2.7.2/go.mod h1:tckmrt0M6bVaDT3kmh9UrIq/CBOBBse+TpXQi5ldaa8= -github.com/nats-io/nats-streaming-server v0.24.1/go.mod h1:N2Q05hKD+aW2Ur1VYP85yUR2zUWHbqJG88CxAFLRrd4= -github.com/nats-io/nats.go v1.10.0/go.mod h1:AjGArbfyR50+afOUotNX2Xs5SYHf+CoOa5HH1eEl2HE= -github.com/nats-io/nats.go v1.13.0/go.mod h1:BPko4oXsySz4aSWeFgOHLZs3G4Jq4ZAyE6/zMCxRT6w= -github.com/nats-io/nats.go v1.13.1-0.20220121202836-972a071d373d/go.mod h1:BPko4oXsySz4aSWeFgOHLZs3G4Jq4ZAyE6/zMCxRT6w= -github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= -github.com/nats-io/nkeys v0.1.4/go.mod h1:XdZpAbhgyyODYqjTawOnIOI7VlbKSarI9Gfy1tqEu/s= -github.com/nats-io/nkeys v0.3.0/go.mod 
h1:gvUNGjVcM2IPr5rCsRsC6Wb3Hr2CQAm08dsxtV6A5y4= -github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= -github.com/nats-io/stan.go v0.10.2/go.mod h1:vo2ax8K2IxaR3JtEMLZRFKIdoK/3o1/PKueapB7ezX0= -github.com/nicksnyder/go-i18n v1.10.1-0.20190510212457-b280125b035a/go.mod h1:e4Di5xjP9oTVrC6y3C7C0HoSYXjSbhh/dU0eUV32nB4= +github.com/nakabonne/nestif v0.3.1/go.mod h1:9EtoZochLn5iUprVDmDjqGKPofoUEBL8U4Ngq6aY7OE= +github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354/go.mod h1:KSVJerMDfblTH7p5MZaTt+8zaT2iEk3AkVb9PQdZuE8= +github.com/ncw/swift v1.0.47/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/nishanths/exhaustive v0.2.3/go.mod h1:bhIX678Nx8inLM9PbpvK1yv6oGtoP8BfaIeMzgBNKvc= +github.com/nishanths/predeclared v0.0.0-20190419143655-18a43bb90ffc/go.mod h1:62PewwiQTlm/7Rj+cxVYqZvDIUc+JjZq6GHAC1fsObQ= +github.com/nishanths/predeclared v0.2.1/go.mod h1:HvkGJcA3naj4lOwnFXFDkFxVtSqQMB9sbB1usJ+xjQE= github.com/nsf/termbox-go v0.0.0-20190121233118-02980233997d/go.mod h1:IuKpRQcYE1Tfu+oAQqaLisqDeXgjyyltCfsaoYN18NQ= -github.com/nsqio/go-nsq v1.1.0/go.mod h1:vKq36oyeVXgsS5Q8YEO7WghqidAVXQlcFxzQbQTuDEY= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= +github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= +github.com/olekukonko/tablewriter v0.0.1/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= +github.com/olekukonko/tablewriter v0.0.2/go.mod h1:rSAaSIOAGT9odnlyGlUfAJaoc5w2fSBUmeGDbRWPxyQ= +github.com/olekukonko/tablewriter v0.0.5/go.mod 
h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= github.com/oliveagle/jsonpath v0.0.0-20180606110733-2e52cf6e6852 h1:Yl0tPBa8QPjGmesFh1D0rDy+q1Twx6FyU7VWHi8wZbI= github.com/oliveagle/jsonpath v0.0.0-20180606110733-2e52cf6e6852/go.mod h1:eqOVx5Vwu4gd2mmMZvVZsgIqNSaW3xxRThUJ0k/TPk4= +github.com/onsi/ginkgo v0.0.0-20151202141238-7f8ab55aaf3b/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= +github.com/onsi/ginkgo/v2 v2.1.4 h1:GNapqRSid3zijZ9H77KrgVG4/8KqiyRsxcSxe+7ApXY= +github.com/onsi/gomega v0.0.0-20151007035656-2152b45fa28a/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.17.0 h1:9Luw4uT5HTjHTN8+aNcSThgH1vdXnmdJ8xIfZ4wyTRE= -github.com/onsi/gomega v1.17.0/go.mod 
h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc= +github.com/onsi/gomega v1.16.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/onsi/gomega v1.19.0 h1:4ieX6qQjPP/BfC3mpsAtIGGlxTWPeA3Inl/7DtXw1tw= +github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= +github.com/opencontainers/go-digest v0.0.0-20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= +github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= +github.com/opencontainers/go-digest v1.0.0-rc1.0.20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.0.0/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= +github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= +github.com/opencontainers/image-spec v1.0.2-0.20211117181255-693428a734f5/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= +github.com/opencontainers/image-spec v1.0.3-0.20220114050600-8b9d41f48198 h1:+czc/J8SlhPKLOtVLMQc+xDCFBT73ZStMsRhSsUhsSg= +github.com/opencontainers/image-spec v1.0.3-0.20220114050600-8b9d41f48198/go.mod h1:j4h1pJW6ZcJTgMZWP3+7RlG3zTaP02aDZ/Qw0sppK7Q= +github.com/opencontainers/runc v0.0.0-20190115041553-12f6a991201f/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= +github.com/opencontainers/runc v0.1.1/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= +github.com/opencontainers/runc v1.0.0-rc8.0.20190926000215-3e425f80a8c9/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= +github.com/opencontainers/runc v1.0.0-rc9/go.mod 
h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= +github.com/opencontainers/runc v1.0.0-rc93/go.mod h1:3NOsor4w32B2tC0Zbl8Knk4Wg84SM2ImC1fxBuqJ/H0= +github.com/opencontainers/runc v1.0.2/go.mod h1:aTaHFFwQXuA71CiyxOdFFIorAoemI04suvGRQFzWTD0= +github.com/opencontainers/runtime-spec v0.1.2-0.20190507144316-5b71a03e2700/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opencontainers/runtime-spec v1.0.1/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opencontainers/runtime-spec v1.0.2-0.20190207185410-29686dbc5559/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opencontainers/runtime-spec v1.0.2/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opencontainers/runtime-spec v1.0.3-0.20200929063507-e6143ca7d51d/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opencontainers/runtime-spec v1.0.3-0.20210326190908-1c3f411f0417/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opencontainers/runtime-tools v0.0.0-20181011054405-1d69bd0f9c39/go.mod h1:r3f7wjNzSs2extwzU3Y+6pKfobzPh+kKFJ3ofN+3nfs= +github.com/opencontainers/selinux v1.6.0/go.mod h1:VVGKuOLlE7v4PJyT6h7mNWvq1rzqiriPsEqVhc+svHE= +github.com/opencontainers/selinux v1.8.0/go.mod h1:RScLhm78qiWa2gbVCcGkC7tCGdgk3ogry1nUQF8Evvo= +github.com/opencontainers/selinux v1.8.2/go.mod h1:MUIHuUEvKB1wtJjQdOyYRgOnLD2xAPP8dBsCoU0KuF8= github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= -github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= +github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw= +github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE= +github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs= +github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo= +github.com/otiai10/mint 
v1.3.1/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pborman/getopt v0.0.0-20180729010549-6fdd0a2c7117/go.mod h1:85jBQOZwpVEaDAr341tbn15RS4fCAsIst0qp7i8ex1o= github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= -github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo= -github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= +github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc= github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= -github.com/pelletier/go-toml v1.9.4 h1:tjENF6MfZAg8e4ZmZTeWaWiT2vXtsoO6+iuOjFhECwM= github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml/v2 v2.0.5 h1:ipoSadvV8oGUjnUbMub59IDPPwfxF694nG/jwbMiyQg= +github.com/pelletier/go-toml/v2 v2.0.5/go.mod h1:OMHamSCAODeSsVrwwvcJOaoN0LIUIaFVNZzmWyNfXas= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= -github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= -github.com/pierrec/lz4 v2.6.1+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= -github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d/go.mod h1:3OzsM7FXDQlpCiw2j81fOmAwQLnZnLGXVKUzeKQXIAw= 
+github.com/pkg/browser v0.0.0-20210115035449-ce105d075bb4 h1:Qj1ukM4GlMWXNdMBuXcXfz/Kw9s1qm0CLY32QxuSImI= +github.com/pkg/browser v0.0.0-20210115035449-ce105d075bb4/go.mod h1:N6UoU20jOqggOuDwUaBQpluzLNDqif3kq9z2wpdYEfQ= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1-0.20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -1001,205 +1215,267 @@ github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qR github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/polyfloyd/go-errorlint v0.0.0-20210722154253-910bb7978349/go.mod h1:wi9BfjxjF/bwiZ701TzmfKu6UKC357IOAtNr0Td0Lvw= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA= -github.com/pquerna/cachecontrol v0.1.0/go.mod h1:NrUG3Z7Rdu85UNR3vm7SOsl1nFIeSiQnrHV5K9mBcUI= +github.com/prometheus/client_golang v0.0.0-20180209125602-c332b6f63c06/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang v0.9.2/go.mod h1:OsXs2jCmiKlQ1lTBmv21f2mNfw4xf/QclQDMrYNZzcM= github.com/prometheus/client_golang v0.9.3/go.mod 
h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.1.0/go.mod h1:I1FGZT9+L76gKKOs5djB6ezCbFQP1xR9D75/vuwEF3g= github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= -github.com/prometheus/client_golang v1.12.1 h1:ZiaPsmm9uiBeaSMRznKsCDNtPCS0T3JVDGF+06gjBzk= github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= +github.com/prometheus/client_golang v1.13.0 h1:b71QUfeo5M8gq2+evJdTPfZhYMAU0uKPkyPJ7TPsloU= +github.com/prometheus/client_golang v1.13.0/go.mod h1:vTeo+zgvILHsnnj/39Ou/1fPN5nJFOEMgftOUOmlvYQ= +github.com/prometheus/client_model v0.0.0-20171117100541-99fa1f4be8e5/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.0.0-20180110214958-89604d197083/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= -github.com/prometheus/common v0.0.0-20181126121408-4724e9255275/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= 
github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.6.0/go.mod h1:eBmuwkDJBwy6iBfxCBob6t6dR6ENT/y+J+Zk0j9GMYc= github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= -github.com/prometheus/common v0.28.0/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= -github.com/prometheus/common v0.32.1 h1:hWIdL3N2HoUx3B8j3YN9mWor0qhY/NlEKZEaXxuIRh4= github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/common v0.37.0 h1:ccBbHCgIiT9uSoFY0vX8H3zsNR5eLt17/RQLUvn8pXE= +github.com/prometheus/common v0.37.0/go.mod h1:phzohg0JFMnBEFGxTDbfu3QyL5GI8gTQJFhYO5B3mfA= +github.com/prometheus/procfs v0.0.0-20180125133057-cb4147076ac7/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.0-20181204211112-1dc9a6cbc91a/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.0-20190522114515-bc1a522cf7b1/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.3/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ= +github.com/prometheus/procfs v0.0.5/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ= github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= github.com/prometheus/procfs 
v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.2.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0VU= github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.8.0 h1:ODq8ZFEaYeCaZOJlZZdJA2AbQR98dSHSM1KW/You5mo= +github.com/prometheus/procfs v0.8.0/go.mod h1:z7EfXMXOkbkqb9IINtpCn86r/to3BnA0uaxHdg830/4= github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= -github.com/radovskyb/watcher v1.0.7/go.mod h1:78okwvY5wPdzcb1UYnip1pvrZNIVEIh/Cm+ZuvsUYIg= -github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/pseudomuto/protoc-gen-doc v1.3.2/go.mod h1:y5+P6n3iGrbKG+9O04V5ld71in3v/bX88wUwgt+U8EA= +github.com/pseudomuto/protokit v0.2.0/go.mod h1:2PdH30hxVHsup8KpBTOXTBeMVhJZVio3Q8ViKSAXT0Q= +github.com/quasilyte/go-consistent v0.0.0-20190521200055-c6f3937de18c/go.mod h1:5STLWrekHfjyYwxBRVRXNOSewLJ3PWfDJd1VyTS21fI= +github.com/quasilyte/go-ruleguard v0.3.1-0.20210203134552-1b5a410e1cc8/go.mod h1:KsAh3x0e7Fkpgs+Q9pNLS5XpFSvYCEVl5gP9Pp1xp30= +github.com/quasilyte/go-ruleguard v0.3.13/go.mod h1:Ul8wwdqR6kBVOCt2dipDBkE+T6vAV/iixkrKuRTN1oQ= +github.com/quasilyte/go-ruleguard/dsl v0.3.0/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= +github.com/quasilyte/go-ruleguard/dsl v0.3.10/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= +github.com/quasilyte/go-ruleguard/rules v0.0.0-20201231183845-9e62ed36efe1/go.mod h1:7JTjp89EGyU1d6XfBiXihJNG37wB2VRkd125Q1u7Plc= +github.com/quasilyte/go-ruleguard/rules v0.0.0-20210428214800-545e0d2e0bf7/go.mod h1:4cgAphtvu7Ftv7vOT2ZOYhC6CvBxZixcasr8qIOTA50= +github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95/go.mod 
h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= github.com/rivo/tview v0.0.0-20200219210816-cd38d7432498/go.mod h1:6lkG1x+13OShEf0EaOCaTQYyB7d5nSbb181KtjlS+84= github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= -github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.6.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.8.1 h1:geMPLpDpQOgVyCg5z5GoRwLHepNdb71NXb67XFkP+Eg= -github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o= -github.com/rs/xid v1.2.1 h1:mhH9Nq+C1fY2l1XIpgxIiUOfNpRBYH1kKcr+qfKgjRc= +github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= +github.com/rs/xid v1.4.0 h1:qd7wPTDkN6KQx2VmMBLrpHkiyQwgFXRnkOLacUiaSNY= +github.com/rs/xid v1.4.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= +github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= +github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0 
h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryancurrah/gomodguard v1.2.3/go.mod h1:rYbA/4Tg5c54mV1sv4sQTP5WOPBcoLtnBZ7/TEhXAbg= +github.com/ryanrolds/sqlclosecheck v0.3.0/go.mod h1:1gREqxyTGR3lVtpngyFo3hZAgk0KCtEdgEkHwDbigdA= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/safchain/ethtool v0.0.0-20190326074333-42ed695e3de8/go.mod h1:Z0q5wiBQGYcxhMZ6gUqHn6pYNLypFAvaL3UvgZLR0U4= +github.com/sagikazarmark/crypt v0.1.0/go.mod h1:B/mN0msZuINBtQ1zZLEQcegFJJf9vnYIR88KRMEuODE= github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig= -github.com/sagikazarmark/crypt v0.4.0/go.mod h1:ALv2SRj7GxYV4HO9elxH9nS6M9gW+xDNxqmyJ6RfDFM= github.com/sanity-io/litter v1.2.0/go.mod h1:JF6pZUFgu2Q0sBZ+HSV35P8TVPI1TTzEwyu9FXAw2W4= -github.com/satori/go.uuid v1.2.0 h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww= +github.com/sanposhiho/wastedassign/v2 v2.0.6/go.mod h1:KyZ0MWTwxxBmfwn33zh3k1dmsbF2ud9pAAGfoLfjhtI= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= -github.com/savsgio/gotils v0.0.0-20200117113501-90175b0fbe3f/go.mod h1:lHhJedqxCoHN+zMtwGNTXWmF0u9Jt363FYRhV6g0CdY= github.com/savsgio/gotils v0.0.0-20210617111740-97865ed5a873 h1:N3Af8f13ooDKcIhsmFT7Z05CStZWu4C7Md0uDEy4q6o= github.com/savsgio/gotils v0.0.0-20210617111740-97865ed5a873/go.mod h1:dmPawKuiAeG/aFYVs2i+Dyosoo7FNcm+Pi8iK6ZUrX8= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo= +github.com/securego/gosec/v2 v2.9.1/go.mod h1:oDcDLcatOJxkCGaCaq8lua1jTnYf6Sou4wdiJ1n4iHc= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/sergi/go-diff v1.1.0 
h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/sethvargo/go-limiter v0.7.2 h1:FgC4N7RMpV5gMrUdda15FaFTkQ/L4fEqM7seXMs4oO8= +github.com/sethvargo/go-limiter v0.7.2/go.mod h1:C0kbSFbiriE5k2FFOe18M1YZbAR2Fiwf72uGu0CXCcU= +github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c/go.mod h1:/PevMnwAxekIXwN8qQyfc5gl2NlkB3CQlkizAbOkeBs= +github.com/shirou/gopsutil/v3 v3.21.10/go.mod h1:t75NhzCZ/dYyPQjyQmrAYP6c8+LCdFANeBMdLPCNnew= github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= +github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/sirupsen/logrus v1.0.4-0.20170822132746-89742aefa4b2/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc= +github.com/sirupsen/logrus v1.0.6/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= -github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/sirupsen/logrus v1.9.0 
h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= +github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/sivchari/tenv v1.4.7/go.mod h1:5nF+bITvkebQVanjU6IuMbvIot/7ReNsUV7I5NbprB0= github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA= github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog= -github.com/slack-go/slack v0.10.2/go.mod h1:5FLdBRv7VW/d9EBxx/eEktOptWygbA9K2QK/KW7ds1s= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= -github.com/smartystreets/assertions v1.2.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= -github.com/smartystreets/goconvey v1.7.2/go.mod h1:Vw0tHAZW6lzCRk3xgdin6fKYcG+G3Pg9vgXWeJpQFMM= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= github.com/soheilhy/cmux v0.1.5 h1:jjzc5WVemNEDTLwv9tlmemhC73tI08BNOIGwBOo10Js= github.com/soheilhy/cmux v0.1.5/go.mod h1:T7TcVDs9LWfQgPlPsdngu6I6QIoyIFZDDC6sNE1GqG0= +github.com/sonatard/noctx v0.0.1/go.mod h1:9D2D/EoULe8Yy2joDHJj7bv3sZoq9AaSb8B4lqBjiZI= +github.com/sourcegraph/go-diff v0.6.1/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= github.com/spf13/afero v1.3.3/go.mod 
h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= +github.com/spf13/afero v1.4.1/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= -github.com/spf13/afero v1.8.0 h1:5MmtuhAgYeU6qpa7w7bP0dv6MBYuup0vekhSpSkoq60= -github.com/spf13/afero v1.8.0/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfAqwo= +github.com/spf13/afero v1.8.2 h1:xehSyVa0YnHWsJ49JFljMpg1HX19V6NDZ1fkm1Xznbo= +github.com/spf13/afero v1.8.2/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfAqwo= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.4.1 h1:s0hze+J0196ZfEMTs80N7UlFt0BDuQ7Q+JDnHiMWKdA= github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= +github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= +github.com/spf13/cobra v0.0.2-0.20171109065643-2da4a54c5cee/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE= -github.com/spf13/cobra v1.1.3/go.mod h1:pGADOWyqRD/YMrPZigI/zbliZ2wVD/23d+is3pSWzOo= github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk= github.com/spf13/cobra v1.3.0/go.mod h1:BrRVncBjOJa/eUcVVm9CE+oC6as8k+VYr4NY7WCi9V4= -github.com/spf13/cobra v1.4.0 h1:y+wJpx64xcgO1V+RcnwW0LEHxTKRi2ZDPSBjWnrg88Q= -github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g= +github.com/spf13/cobra v1.5.0 h1:X+jTBEBqF0bHN+9cSMgmfuvv2VHJ9ezmFNf9Y/XstYU= +github.com/spf13/cobra v1.5.0/go.mod h1:dWXEIy2H428czQCjInthrTRUg7yKbok+2Qi/yBIJoUM= 
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.1-0.20171106142849-4c012f6dcd95/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= -github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= +github.com/spf13/viper v1.7.1/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= +github.com/spf13/viper v1.9.0/go.mod h1:+i6ajR7OX2XaiBkrcZJFK21htRk7eDeLg7+O6bhUPP4= github.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy5AsM= -github.com/spf13/viper v1.10.1 h1:nuJZuYpG7gTj/XqiUwg8bA0cp1+M2mC3J4g5luUYBKk= -github.com/spf13/viper v1.10.1/go.mod h1:IGlFPqhNAPKRxohIzWpI5QEy4kuI7tcl5WvR+8qy1rU= +github.com/spf13/viper v1.13.0 h1:BWSJ/M+f+3nmdz9bxB+bWX28kkALN2ok11D0rSo8EJU= +github.com/spf13/viper v1.13.0/go.mod h1:Icm2xNL3/8uyh/wFuB1jI7TiTNKp8632Nwegu+zgdYw= +github.com/ssgreg/nlreturn/v2 v2.2.1/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I= +github.com/stefanberger/go-pkcs11uri v0.0.0-20201008174630-78d3cae3a980/go.mod 
h1:AO3tvPzVZ/ayst6UlUKUv6rcPQInYe3IknH3jYhAKu8= github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= -github.com/streadway/amqp v1.0.0/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/stretchr/objx v0.0.0-20180129172003-8a3f7159479f/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/objx v0.4.0 h1:M2gUjqZET1qApGOWNSnZ49BAIMX4F/1plDv3+l31EJ4= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/testify v0.0.0-20161117074351-18a02ba4a312/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v0.0.0-20170130113145-4d4bfba8f1d1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v0.0.0-20180303142811-b89eecf5ca5d/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.1.4/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMTY= github.com/stretchr/testify v1.7.1/go.mod 
h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stripe/stripe-go v70.15.0+incompatible/go.mod h1:A1dQZmO/QypXmsL0T8axYZkSN/uA/T/A64pfKdBAMiY= -github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= +github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= +github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= -github.com/tidwall/gjson v1.12.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= -github.com/tidwall/gjson v1.13.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= -github.com/tidwall/gjson v1.14.0 h1:6aeJ0bzojgWLa82gDQHcx3S0Lr/O51I9bJ5nv6JFx5w= -github.com/tidwall/gjson v1.14.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/subosito/gotenv v1.4.1 h1:jyEFiXpy21Wm81FBN71l9VoMMV8H8jG+qIK3GCpY6Qs= +github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= +github.com/sylvia7788/contextcheck v1.0.4/go.mod h1:vuPKJMQ7MQ91ZTqfdyreNKwZjyUg6KO+IebVyQDedZQ= +github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= +github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= +github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= +github.com/tchap/go-patricia v2.2.6+incompatible/go.mod h1:bmLyhP68RS6kStMGxByiQ23RP/odRBOTVjwp2cDyi6I= +github.com/tdakkota/asciicheck v0.0.0-20200416200610-e657995f937b/go.mod h1:yHp0ai0Z9gUljN3o0xMhYJnH/IcvkdTBOX2fmJ93JEM= +github.com/tenntenn/modver v1.0.1/go.mod h1:bePIyQPb7UeioSRkw3Q0XeMhYZSMx9B8ePqg6SAMGH0= +github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod 
h1:ON8b8w4BN/kE1EOhwT0o+d62W65a6aPw1nouo9LMgyY= +github.com/tetafro/godot v1.4.11/go.mod h1:LR3CJpxDVGlYOWn3ZZg1PgNZdTUvzsZWu8xaEohUpn8= +github.com/tidwall/gjson v1.14.3 h1:9jvXn7olKEHU1S9vwoMGliaT8jq1vJ7IH/n9zD9Dnlw= +github.com/tidwall/gjson v1.14.3/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= -github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs= github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= -github.com/tidwall/sjson v1.2.4/go.mod h1:098SZ494YoMWPmMO6ct4dcFnqxwj9r/gF0Etp19pSNM= +github.com/timakin/bodyclose v0.0.0-20200424151742-cb6215831a94/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk= +github.com/tklauser/go-sysconf v0.3.9/go.mod h1:11DU/5sG7UexIrp/O6g35hrWzu0JxlwQ3LSFUzyeuhs= +github.com/tklauser/numcpus v0.3.0/go.mod h1:yFGUr7TUHQRAhyqBcEg0Ge34zDBAsIvJJcyE6boqnA8= +github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/tmc/grpc-websocket-proxy v0.0.0-20201229170055-e5319fda7802/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/toqueteos/webbrowser v1.2.0/go.mod h1:XWoZq4cyp9WeUeak7w7LXRUQf1F1ATJMir8RTqb4ayM= +github.com/tmc/grpc-websocket-proxy v0.0.0-20200427203606-3cfed13b9966/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/tomarrell/wrapcheck/v2 v2.4.0/go.mod h1:68bQ/eJg55BROaRTbMjC7vuhL2OgfoG8bLp9ZyoBfyY= +github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce/go.mod h1:o8v6yHRoik09Xen7gje4m9ERNah1d1PPsVq1VEx9vE4= +github.com/tommy-muehle/go-mnd/v2 v2.4.0/go.mod 
h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw= github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= -github.com/uber/jaeger-client-go v2.30.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= -github.com/uber/jaeger-lib v2.4.1+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U= github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= -github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= +github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= +github.com/ultraware/funlen v0.0.3/go.mod h1:Dp4UiAus7Wdb9KUZsYWZEWiRzGuM2kXM1lPbfaF6xhA= +github.com/ultraware/whitespace v0.0.4/go.mod h1:aVMh/gQve5Maj9hQ/hg+F75lr/X5A89uZnzAmWSineA= +github.com/urfave/cli v0.0.0-20171014202726-7bc6a0acffa5/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= +github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= +github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/urfave/cli v1.22.4/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/uudashr/gocognit v1.0.5/go.mod h1:wgYz0mitoKOTysqxTDMOUXg+Jb5SvtihkfmugIZYpEA= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/valyala/fasthttp v1.9.0/go.mod h1:FstJa9V+Pj9vQ7OJie2qMHdwemEDaDiSdBnvPM1Su9w= -github.com/valyala/fasthttp v1.27.0 h1:gDefRDL9aqSiwXV6aRW8aSBPs82y4KizSzHrBLf4NDI= github.com/valyala/fasthttp v1.27.0/go.mod h1:cmWIqlu99AO/RKcp1HWaViTqc57FswJOfYYdPJBl8BA= +github.com/valyala/fasthttp v1.30.0 h1:nBNzWrgZUUHohyLPU/jTvXdhrcaf2m5k3bWk+3Q049g= +github.com/valyala/fasthttp v1.30.0/go.mod 
h1:2rsYD01CKFrjjsvFxx75KlEUNpWNBY9JWD3K/7o2Cus= github.com/valyala/fasttemplate v0.0.0-20170224212429-dcecefd839c4/go.mod h1:50wTf68f99/Zt14pr046Tgt3Lp2vLyFZKzbFXTOabXw= github.com/valyala/fasttemplate v1.2.1 h1:TVEnxayobAdVkhQfrfes2IzOB6o+z4roRkPF52WA1u4= github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= -github.com/valyala/gozstd v1.7.0/go.mod h1:y5Ew47GLlP37EkTB+B4s7r6A5rdaeB7ftbl9zoYiIPQ= -github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio= +github.com/valyala/quicktemplate v1.7.0/go.mod h1:sqKJnoaOF88V07vkO+9FL8fb9uZg/VPSJnLYn+LmLk8= github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc= -github.com/vektah/gqlparser v1.1.2/go.mod h1:1ycwN7Ij5njmMkPPAOaRFY4rET2Enx7IkVv3vaXspKw= -github.com/weaveworks/promrus v1.2.0/go.mod h1:SaE82+OJ91yqjrE1rsvBWVzNZKcHYFtMUyS1+Ogs/KA= +github.com/vbatts/tar-split v0.11.2 h1:Via6XqJr0hceW4wff3QRzD5gAk/tatMw/4ZA7cTlIME= +github.com/vbatts/tar-split v0.11.2/go.mod h1:vV3ZuO2yWSVsz+pfFzDG/upWH1JhjOiEaWq6kXyQ3VI= +github.com/viki-org/dnscache v0.0.0-20130720023526-c70c1f23c5d8/go.mod h1:dniwbG03GafCjFohMDmz6Zc6oCuiqgH6tGNyXTkHzXE= +github.com/vishvananda/netlink v0.0.0-20181108222139-023a6dafdcdf/go.mod h1:+SR5DhBJrl6ZM7CoCKvpw5BKroDKQ+PJqOg65H/2ktk= +github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE= +github.com/vishvananda/netlink v1.1.1-0.20201029203352-d40f9887b852/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho= +github.com/vishvananda/netns v0.0.0-20180720170159-13995c7128cc/go.mod h1:ZjcWmFBXmLKZu9Nxj3WKYEafiSqer2rnvPr0en9UNpI= +github.com/vishvananda/netns v0.0.0-20191106174202-0a2b9b5464df/go.mod h1:JP3t17pCcGlemwknint6hfoeCVQrEMVwxRLRjXpq+BU= +github.com/vishvananda/netns v0.0.0-20200728191858-db3c7e526aae/go.mod h1:DD4vA1DwXk04H54A1oHXtwZmA0grkVMdPxx/VGLCah0= github.com/whilp/git-urls v1.0.0 
h1:95f6UMWN5FKW71ECsXRUd3FVYiXdrE7aX4NZKcPmIjU= github.com/whilp/git-urls v1.0.0/go.mod h1:J16SAmobsqc3Qcy98brfl5f5+e0clUvg1krgwk/qCfE= -github.com/xanzy/go-gitlab v0.55.1/go.mod h1:F0QEXwmqiBUxCgJm8fE9S+1veX4XC9Z4cfaAbqwk4YM= -github.com/xanzy/ssh-agent v0.3.0/go.mod h1:3s9xbODqPuuhK9JV1R321M/FlMZSBvE5aY6eAcqrDh0= +github.com/willf/bitset v1.1.11-0.20200630133818-d5bec3311243/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4= +github.com/willf/bitset v1.1.11/go.mod h1:83CECat5yLh5zVOf4P1ErAgKA5UDvKtgyUABdr3+MjI= github.com/xanzy/ssh-agent v0.3.1 h1:AmzO1SSWxw73zxFZPRwaMN1MohDw8UyHnmuxyceTEGo= github.com/xanzy/ssh-agent v0.3.1/go.mod h1:QIE4lCeL7nkC25x+yA3LBIYfwCc1TFziCtG7cBAac6w= -github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= -github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= -github.com/xdg-go/scram v1.1.0/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= -github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= -github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= -github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v0.0.0-20180618132009-1d523034197f/go.mod 
h1:5yf86TLmAcydyeJq5YvxkGPE2fm/u4myDekKRoLuqhs= github.com/xeipuuv/gojsonschema v1.1.0/go.mod h1:5yf86TLmAcydyeJq5YvxkGPE2fm/u4myDekKRoLuqhs= github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778/go.mod h1:2MuV+tbUrU1zIOPMxZ5EncGwgmMJsa+9ucAQZXxsObs= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= -github.com/yahoo/athenz v1.8.55/go.mod h1:G7LLFUH7Z/r4QAB7FfudfuA7Am/eCzO1GlzBhDL6Kv0= github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0 h1:6fRhSjgLCkTD3JnJxvaJ4Sj+TYblw757bqYgZaOq5ZY= github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0/go.mod h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI= -github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= +github.com/yeya24/promlinter v0.1.0/go.mod h1:rs5vtZzeBHqqMwXqFScncpCF6u06lezhZepno9AB1Oc= github.com/yudai/gojsondiff v1.0.0 h1:27cbfqXLVEJ1o8I6v3y9lg8Ydm53EKqHXAOMxEGlCOA= github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82 h1:BHyfKlQyqbsFN5p3IfnEUduWvb9is428/nNb5L3U01M= @@ -1213,30 +1489,23 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9/go.mod h1:E1AXubJBdNmFERAOucpDIxNzeGfLzg0mYh+UfMWdChA= +github.com/yvasiyarov/go-metrics 
v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs= +github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA= +github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= -go.etcd.io/bbolt v1.3.6/go.mod h1:qXsaaIqmgQH0T+OPdb99Bf+PKfBBQVAdyD6TY9G8XM4= +go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/bbolt v1.3.4/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= +go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= +go.etcd.io/etcd v0.0.0-20200513171258-e048e166ab9c/go.mod h1:xCI7ZzBfRuGgBXyXO6yfWfDmlWd35khcWpUa4L0xI/k= +go.etcd.io/etcd v0.5.0-alpha.5.0.20200910180754-dd1b699fc489/go.mod h1:yVHk9ub3CSBatqGNg7GRmsnfLWtoW60w4eDYfh7vHDg= go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs= -go.etcd.io/etcd/client/v3 v3.5.0/go.mod h1:AIKXXVX/DQXtfTEqBryiLTUXwON+GuvO6Z7lLS/oTh0= -go.etcd.io/etcd/pkg/v3 v3.5.0/go.mod h1:UzJGatBQ1lXChBkQF0AuAtkRQMYnHubxAEYIrC3MSsE= -go.etcd.io/etcd/raft/v3 v3.5.0/go.mod h1:UFOHSIvO/nKwd4lhkwabrTD3cqW5yVyYYf/KlD00Szc= -go.etcd.io/etcd/server/v3 v3.5.0/go.mod h1:3Ah5ruV+M+7RZr0+Y/5mNLwC+eQlni+mQmOVdCRJoS4= -go.mongodb.org/mongo-driver v1.0.3/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM= -go.mongodb.org/mongo-driver v1.1.1/go.mod 
h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM= -go.mongodb.org/mongo-driver v1.3.0/go.mod h1:MSWZXKOynuguX+JSvwP8i+58jYCXxbia8HS3gZBapIE= -go.mongodb.org/mongo-driver v1.3.4/go.mod h1:MSWZXKOynuguX+JSvwP8i+58jYCXxbia8HS3gZBapIE= -go.mongodb.org/mongo-driver v1.4.3/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= -go.mongodb.org/mongo-driver v1.4.4/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= -go.mongodb.org/mongo-driver v1.4.6/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= -go.mongodb.org/mongo-driver v1.5.1/go.mod h1:gRXCHX4Jo7J0IJ1oDQyUxF7jfy19UfxniMS4xxMmUqw= -go.mongodb.org/mongo-driver v1.7.3/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg= -go.mongodb.org/mongo-driver v1.7.5/go.mod h1:VXEWRZ6URJIkUq2SCAyapmhH0ZLRBP+FT4xhp5Zvxng= -go.mongodb.org/mongo-driver v1.8.2/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= +go.mozilla.org/mozlog v0.0.0-20170222151521-4bb13139d403/go.mod h1:jHoPAGnDrCy6kaI2tAze5Prf0Nr0w/oNkROt2lw3n3o= +go.mozilla.org/pkcs7 v0.0.0-20200128120323-432b2356ecb1/go.mod h1:SNgMg+EgDFwmvSmLRTNKC5fegJjB7v23qTQ0XLGUNHk= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -1245,75 +1514,56 @@ go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= -go.opentelemetry.io/contrib v0.20.0/go.mod h1:G/EtFaa6qaN7+LxqfIAT3GiZa7Wv5DTBUzl5H4LY0Kc= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.20.0/go.mod h1:oVGt1LRbBOBq1A5BQLlUg9UaU/54aiHw8cgjV3aWZ/E= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.20.0/go.mod 
h1:2AboqHi0CiIZU0qwhtUfCYD1GeUzvvIXWNkhDt7ZMG4= -go.opentelemetry.io/otel v0.20.0/go.mod h1:Y3ugLH2oa81t5QO+Lty+zXf8zC9L26ax4Nzoxm/dooo= -go.opentelemetry.io/otel/exporters/otlp v0.20.0/go.mod h1:YIieizyaN77rtLJra0buKiNBOm9XQfkPEKBeuhoMwAM= -go.opentelemetry.io/otel/metric v0.20.0/go.mod h1:598I5tYlH1vzBjn+BTuhzTCSb/9debfNp6R3s7Pr1eU= -go.opentelemetry.io/otel/oteltest v0.20.0/go.mod h1:L7bgKf9ZB7qCwT9Up7i9/pn0PWIa9FqQ2IQ8LoxiGnw= -go.opentelemetry.io/otel/sdk v0.20.0/go.mod h1:g/IcepuwNsoiX5Byy2nNV0ySUF1em498m7hBWC279Yc= -go.opentelemetry.io/otel/sdk/export/metric v0.20.0/go.mod h1:h7RBNMsDJ5pmI1zExLi+bJK+Dr8NQCh0qGhm1KDnNlE= -go.opentelemetry.io/otel/sdk/metric v0.20.0/go.mod h1:knxiS8Xd4E/N+ZqKmUPf3gTTZ4/0TjTXukfxjzSTpHE= -go.opentelemetry.io/otel/trace v0.20.0/go.mod h1:6GjCW8zgDjwGHGa6GkyeB8+/5vjT16gUEi0Nf1iBdgw= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= -go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= -go.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= -go.uber.org/goleak v1.1.12/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/multierr v1.4.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= -go.uber.org/ratelimit v0.2.0/go.mod 
h1:YYBV4e4naJvhpitQrWJu1vCpgB7CboMe0qhltKt6mUg= +go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= -go.uber.org/zap v1.19.0/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= -go.uber.org/zap v1.19.1/go.mod h1:j3DNczoxDZroyBnOT1L/Q79cfUMGZxlv/9dzN7SM1rI= -go.uber.org/zap v1.21.0/go.mod h1:wjWOCqI0f2ZZrJF/UufIOkiC8ii6tm1iqIsLo76RfJw= +golang.org/x/crypto v0.0.0-20171113213409-9f005a07e0d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20180501155221-613d6eafa307/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20180723164146-c126467f60eb/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20180910181607-0e37d006457b/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181009213950-7c1a557ab941/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190320223903-b7391e95e576/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto 
v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190513172903-22d7a77e9e5f/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190617133340-57b3e21c3d56/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20191206172530-e9b2fee46413/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200220183623-bac4c82f6975/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto 
v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20201112155050-0c6587e931a9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201216223049-8b5274cf687f/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= -golang.org/x/crypto v0.0.0-20210314154223-e6e6c4f2bb5b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220128200615-198e4374d7ed h1:YoWVYYAfvQ4ddHv3OKmIvX7NCAhFGTj62VP2l2kfBbA= -golang.org/x/crypto v0.0.0-20220128200615-198e4374d7ed/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/crypto 
v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa h1:zuSxTR4o9y82ebqCUJYNGJbGPo6sKVl54f/TVDObg1c= +golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= @@ -1323,8 +1573,10 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw= golang.org/x/exp v0.0.0-20200908183739-ae8ad444f925/go.mod h1:1phAWC201xIgDyaFpmDeZkgf70Q4Pd/CNqfRtVPtxNw= -golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs= +golang.org/x/exp v0.0.0-20220602145555-4a0574d9293f h1:KK6mxegmt5hGJRcAnEDjSNLxIRhZxDcgwMbcO/lMCRM= +golang.org/x/exp v0.0.0-20220602145555-4a0574d9293f/go.mod h1:yh0Ynu2b5ZUe3MQfp2nM0ecK7wsgouWTDN0FNeJuIys= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod 
h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -1354,12 +1606,11 @@ golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180921000356-2f5d2388922f/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181011144130-49bb7cea24b1/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1367,16 +1618,17 @@ golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73r golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod 
h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190320064053-1272bf9dcd53/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190619014844-b5b0513f8c1b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191002035440-2ec189313ef0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -1388,17 +1640,16 @@ golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net 
v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200425230154-ff2c4b7c35a0/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200505041828-1ed23360d12c/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200602114024-627f9648deb9/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201006153459-a7d1128ccaa0/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= @@ -1411,26 +1662,26 @@ golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod 
h1:RBQZq4jEuRlivfhVLd golang.org/x/net v0.0.0-20210326060303-6b1517762897/go.mod h1:uSPa2vr4CLtc/ILN5odXGNXS6mhrKVzTaCXzk9m6W3k= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= -golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM= golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210510120150-4163338589ed/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210825183410-e898025ed96a/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20211209124913-491a49abca63/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211216030914-fe4d6282115f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20220121210141-e204ce36a2ba/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net 
v0.0.0-20220225172249-27dd8689420f h1:oA4XRj0qtSt8Yo1Zms0CUlsT3KG69V2UGQWPBxujDmc= golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/oauth2 v0.0.0-20180227000427-d7d64896b5ff/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220624214902-1bab6f366d9e/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.0.0-20220909164309-bea034e7d591 h1:D0B/7al0LLrVC8aWF4+oxpv/m8bc7ViFfVS8/gXGdqI= +golang.org/x/net v0.0.0-20220909164309-bea034e7d591/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -1448,8 +1699,12 @@ golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod 
h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a h1:qfl7ob3DIEs3Ml9oLuPwY2N04gymzAW04WsUQHIClgM= +golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220608161450-d0670ef3b1eb/go.mod h1:jaDAt6Dkxork7LmZnYtzbRWj0W47D86a3TGe0YHBvmE= +golang.org/x/oauth2 v0.0.0-20220822191816-0ebed06d0094 h1:2o1E+E8TpNLklK9nHiPiK1uzIYrIHt+cQx3ynCwq9V8= +golang.org/x/oauth2 v0.0.0-20220822191816-0ebed06d0094/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -1461,10 +1716,10 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f h1:Ax0t5p6N38Ga0dThY21weqDEyz2oklo4IvDkpigvkD8= 
+golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20170830134202-bb24a47a89ea/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180224232135-f6cff0780e54/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1473,26 +1728,26 @@ golang.org/x/sys v0.0.0-20181019160139-8e24a49d80f8/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190130150945-aca44879d564/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190209173611-3b5209105503/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190321052220-f7bb7a8bee54/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190419153524-e8e3143a4f4a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190514135907-3a4b5fb9f71f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190522044717-8097e1b27ff5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190602015325-4c4f7f33c9ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606203320-7fc4e5ec1444/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190626150813-e07cf5db2756/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190804053845-51ab0e2deafa/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190801041406-cbf593c0f2f3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190812073006-9eafafc0a87e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1501,35 +1756,50 @@ golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191115151921-52ab43148777/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191210023423-ac6580df4449/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200120151820-655fe14d7479/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200217220822-9197077df867/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200420163511-1957bb5e6d1f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200622214017-ed371f2e16b4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200728102440-3e129f6d46b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200817155316-9781c653f443/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200831180312-196b9ba8737a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200923182605-d9f96fdee20d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200909081042-eff7692f9009/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200916030750-2334cc1a136f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200922070232-aee5d888a860/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201005172224-997123666555/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201112073958-5cba982894dd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201117170446-d9b008d0a637/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201202213521-69691e467435/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1543,41 +1813,48 @@ golang.org/x/sys 
v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210426230700-d19ff857e887/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210608053332-aa57babbf139/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210816074244-15123e1e1f71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210915083310-ed5796bab164/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210917161153-d61c044b1678/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211013075003-97ac67df715c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211029165221-6e7872819dc8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211205182925-97ca703d548d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220111092808-5a964db01320/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5 h1:y/woIyUBFbpQGKS0u1aHF/40WUDnek3fPOyD08H5Vng= -golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220610221304-9f5ed59c137d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 h1:WIoqL4EROvwiPdUtaip4VcDdpZ4kha7wBWZrbVKCIZg= +golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 h1:JGgROgKl9N8DuW20oFS5gxc+lE67/N3FcwmBPMe7ArY= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text 
v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1591,46 +1868,49 @@ golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20220210224613-90d013bbcef8 h1:vVKdlvoWBphwdxWKrFZEuM0kGgGLxUOYcY4U/2Vjg44= golang.org/x/time v0.0.0-20220210224613-90d013bbcef8/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20220224211638-0e9765cccd65 h1:M73Iuj3xbbb9Uk1DYhzydthsj6oOd6l9bpuFcNoUvTs= +golang.org/x/time v0.0.0-20220224211638-0e9765cccd65/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod 
h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190110163146-51295c7ec13a/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190125232054-d66bd3c5d5a6/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190307163923-6a08e3108db3/go.mod h1:25r3+/G6/xytQM8iWZKq3Hn0kr0rgFKPUNVEL/dr3z4= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190321232350-e250d351ecad/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190322203728-c1a832b0ad89/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190329151228-23e29df326fe/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod 
h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190424220101-1e8e1cfdf96b/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190617190820-da514acc4774/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190808195139-e713427fea3f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190916130336-e45ffcd953cc/go.mod 
h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191010075000-0337d82405ff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -1638,9 +1918,11 @@ golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216052735-49a3e744a425/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117220505-0cba7a3a9ee9/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod 
h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= @@ -1650,46 +1932,73 @@ golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapK golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200324003944-a576cf524670/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200414032229-332987a829c3/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200422022333-3d57cf2e726e/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200426102838-f3a5411a4c3b/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200505023115-26f46d2f7ef8/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200622203043-20e05c1c8ffa/go.mod 
h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200624225443-88f3c62a19ff/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200625211823-6506e20df31f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200626171337-aa94e735be7f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200630154851-b2d8b0336632/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200706234117-b22de6825cf7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200724022722-7017fd6b1305/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200812195022-5ae4c3c160a0/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200831203904-5a2aa26beb65/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201001104356-43ebab892c4c/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= +golang.org/x/tools v0.0.0-20201002184944-ecd9fd270d5d/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= +golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201028025901-8cd080b735b3/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod 
h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201114224030-61ea331ec02b/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201118003311-bd56c0adb394/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201230224404-63754364767c/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210101214203-2dba1e4ea05c/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210104081019-d8d6ddbec6ee/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.1-0.20210205202024-ef80cdb6ec6d/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= +golang.org/x/tools v0.1.1-0.20210302220138-2ac05c832e1a/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools 
v0.1.6-0.20210820212750-d4cc65f0b2ff/go.mod h1:YD9qOF0M9xpSpdWTBbzEl5e/RnCefISl8E5Noe10jFM= +golang.org/x/tools v0.1.6/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= +golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -gomodules.xyz/jsonpatch/v2 v2.2.0/go.mod h1:WXp+iVDkoLQqPudfQ9GBlwB2eZ5DKOnjQZCYdOS8GPY= -gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo= -gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0= -gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw= -gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc= +golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= +golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f h1:uF6paiQQebLeSXkrTqHqz0MXhXXS1KgF41eUdBNvxK0= +golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= +google.golang.org/api v0.0.0-20160322025152-9bf6e6e569ff/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= google.golang.org/api v0.4.0/go.mod 
h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.10.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= @@ -1716,40 +2025,48 @@ google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6 google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= -google.golang.org/api v0.58.0/go.mod h1:cAbP2FsxoGVNwtgNAmmn3y5G1TWAiVYRmg4yku3lv+E= google.golang.org/api v0.59.0/go.mod h1:sT2boj7M9YJxZzgeZqXogmhfmRWDtPzT31xkieUbuZU= google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= google.golang.org/api v0.62.0/go.mod h1:dKmwPCydfsad4qCH08MSdgWjfHOyfpd4VtDGgRFdavw= google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= -google.golang.org/api v0.64.0/go.mod h1:931CdxA8Rm4t6zqTFGSsgwbAEZ2+GMYurbndwSimebM= -google.golang.org/api v0.66.0/go.mod h1:I1dmXYpX7HGwz/ejRxwQp2qj5bFAz93HiCU1C1oYd9M= google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g= google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA= -google.golang.org/api v0.73.0 h1:O9bThUh35K1rvUrQwTUQ1eqLC/IYyzUpWavYIO2EXvo= -google.golang.org/api v0.73.0/go.mod h1:lbd/q6BRFJbdpV6OUCXstVeiI5mL/d3/WifG7iNKnjI= -google.golang.org/appengine v1.0.0/go.mod 
h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8= +google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs= +google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= +google.golang.org/api v0.78.0/go.mod h1:1Sg78yoMLOhlQTeF+ARBoytAcH1NNyyl390YMy6rKmw= +google.golang.org/api v0.80.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg= +google.golang.org/api v0.84.0/go.mod h1:NTsGnUFJMYROtiquksZHBWtHfeMC7iYthki7Eq3pa8o= +google.golang.org/api v0.96.0 h1:F60cuQPJq7K7FzsxMYHAUJSiXh2oKctHxBMbDygxhfM= +google.golang.org/api v0.96.0/go.mod h1:w7wJQLTM+wvQpNf5JyEcBoxK0RH7EDrh/L4qfsuJ13s= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.2/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/cloud v0.0.0-20151119220103-975617b05ea8/go.mod h1:0H1ncTHf11KCFhTc/+EFRbzSCOZx+VUbRMk55Yv5MYk= +google.golang.org/genproto v0.0.0-20170818010345-ee236bd376b0/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= 
+google.golang.org/genproto v0.0.0-20181107211654-5fc9ac540362/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190522204451-c2c4e71fbf69/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s= google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20190927181202-20e1ac93f88c/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200117163144-32f20d992d24/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= 
google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= @@ -1765,13 +2082,15 @@ google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfG google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200626011028-ee7919e894b5/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200707001353-8e8330bf89df/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201019141844-1ed22bb0c154/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201102152239-715cce707fb0/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201110150050-8816d57aaa9a/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= 
google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= @@ -1781,6 +2100,7 @@ google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210329143202-679c6ae281ee/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= @@ -1797,10 +2117,8 @@ google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEc google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210917145530-b395a37504d4/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211008145708-270636b82663/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto 
v0.0.0-20211018162055-cf77aa76bad2/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211028162531-8db9c33dc351/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211129164237-f09f9a12af12/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= @@ -1808,26 +2126,42 @@ google.golang.org/genproto v0.0.0-20211203200212-54befc351ae9/go.mod h1:5CzLGKJ6 google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211223182754-3ac035c7e7cb/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220111164026-67b88f271998/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220114231437-d2e6a121cae0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220201184016-50beb8ab5c44/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6 h1:FglFEfyj61zP3c6LgjmVHxYxZWXYul9oiS1EZqD5gLc= +google.golang.org/genproto 
v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= +google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220523171625-347a074981d8/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220608133413-ed9918b62aac/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220616135557-88e70c0c3a90/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220617124728-180714bec0ad/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220624142145-8cd45d7dbd1f/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220810155839-1856144b1d9c h1:IooGDWedfLC6KLczH/uduUsKQP42ZZYhKx+zd50L1Sk= +google.golang.org/genproto v0.0.0-20220810155839-1856144b1d9c/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk= +google.golang.org/grpc 
v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= +google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.24.0/go.mod h1:XDChyiUovWa60DnaeDeZmSW86xtLtjtZbwvSiRnRtcA= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.0/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= @@ -1848,8 +2182,12 @@ google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9K google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= google.golang.org/grpc v1.43.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= -google.golang.org/grpc v1.45.0 h1:NEpgUqV3Z+ZjkqMsxMg11IaDrXY4RY6CQukSGK0uI1M= 
google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= +google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.47.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.48.0 h1:rQOsyJ/8+ufEDJd/Gdsz7HG220Mh9HAhFHRGnIjda0w= +google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -1863,18 +2201,25 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= +google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 
v1.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20141024133853-64131543e789/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= +gopkg.in/cheggaaa/pb.v1 v1.0.28/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= +gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo= gopkg.in/go-playground/webhooks.v5 v5.17.0 h1:truBced5ZmkiNKK47cM8bMe86wUSjNks7SFMuNKwzlc= gopkg.in/go-playground/webhooks.v5 v5.17.0/go.mod h1:LZbya/qLVdbqDR1aKrGuWV6qbia2zCYSR5dpom2SInQ= gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= @@ -1882,15 +2227,15 @@ gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.57.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.63.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.66.2/go.mod 
h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/ini.v1 v1.66.3 h1:jRskFVxYaMGAMUbN0UZ7niA9gzL9B49DOqE78vg0k3w= -gopkg.in/ini.v1 v1.66.3/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= +gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/jcmturner/aescts.v1 v1.0.1 h1:cVVZBK2b1zY26haWB4vbBiZrfFQnfbTVrE3xZq6hrEw= gopkg.in/jcmturner/aescts.v1 v1.0.1/go.mod h1:nsR8qBOg+OucoIW+WMhB3GspUQXq9XorLnQb9XtvcOo= gopkg.in/jcmturner/dnsutils.v1 v1.0.1 h1:cIuC1OLRGZrld+16ZJvvZxVJeKPsvd5eUIvxfoN5hSM= gopkg.in/jcmturner/dnsutils.v1 v1.0.1/go.mod h1:m3v+5svpVOhtFAP/wSz+yzh4Mc0Fg7eRhxkJMWSIz9Q= gopkg.in/jcmturner/goidentity.v2 v2.0.0 h1:6Bmcdaxb0dD3HyHbo/MtJ2Q1wXLDuZJFwXZmuZvM+zw= -gopkg.in/jcmturner/goidentity.v2 v2.0.0/go.mod h1:vCwK9HeXksMeUmQ4SxDd1tRz4LejrKh3KRVjQWhjvZI= gopkg.in/jcmturner/gokrb5.v5 v5.3.0 h1:RS1MYApX27Hx1Xw7NECs7XxGxxrm69/4OmaRuX9kwec= gopkg.in/jcmturner/gokrb5.v5 v5.3.0/go.mod h1:oQz8Wc5GsctOTgCVyKad1Vw4TCWz5G6gfIQr88RPv4k= gopkg.in/jcmturner/rpc.v0 v0.0.2 h1:wBTgrbL1qmLBUPsYVCqdJiI5aJgQhexmK+JkTHPUNJI= @@ -1900,7 +2245,7 @@ gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22/go.mod h1:yeKp02qBN3iKW1OzL3M gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/square/go-jose.v2 v2.2.2/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= -gopkg.in/square/go-jose.v2 v2.4.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= +gopkg.in/square/go-jose.v2 v2.3.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= gopkg.in/square/go-jose.v2 v2.5.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= gopkg.in/square/go-jose.v2 v2.6.0 h1:NGk74WTnPKBNUhNzQX7PYcTLUjoq7mzKk2OKbvwk2iI= gopkg.in/square/go-jose.v2 v2.6.0/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= @@ -1914,18 +2259,21 @@ gopkg.in/yaml.v2 
v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.6/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo= +gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk= gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8= +gotest.tools/v3 v3.1.0 h1:rVV8Tcg/8jHUkPUorwjaMTtemIMVXfIPKiOqnhEhakk= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools 
v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= @@ -1933,69 +2281,77 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.5/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.2.1/go.mod h1:lPVVZ2BS5TfnjLyizF7o7hv7j9/L+8cZY2hLyjP9cGY= k8s.io/api v0.17.8/go.mod h1:N++Llhs8kCixMUoCaXXAyMMPbo8dDVnh+IQ36xZV2/0= -k8s.io/api v0.23.0/go.mod h1:8wmDdLBHBNxtOIytwLstXt5E9PddnZb0GaMcqsvDBpg= -k8s.io/api v0.23.3 h1:KNrME8KHGr12Ozjf8ytOewKzZh6hl/hHUZeHddT3a38= -k8s.io/api v0.23.3/go.mod h1:w258XdGyvCmnBj/vGzQMj6kzdufJZVUwEM1U2fRJwSQ= -k8s.io/apiextensions-apiserver v0.23.0/go.mod h1:xIFAEEDlAZgpVBl/1VSjGDmLoXAWRG40+GsWhKhAxY4= -k8s.io/apiextensions-apiserver v0.23.3/go.mod h1:/ZpRXdgKZA6DvIVPEmXDCZJN53YIQEUDF+hrpIQJL38= +k8s.io/api v0.20.1/go.mod h1:KqwcCVogGxQY3nBlRpwt+wpAMF/KjaCc7RpywacvqUo= +k8s.io/api v0.20.4/go.mod h1:++lNL1AJMkDymriNniQsWRkMDzRaX2Y/POTUi8yvqYQ= +k8s.io/api v0.20.6/go.mod h1:X9e8Qag6JV/bL5G6bU8sdVRltWKmdHsFUGS3eVndqE8= +k8s.io/api v0.24.3 h1:tt55QEmKd6L2k5DP6G/ZzdMQKvG5ro4H4teClqm0sTY= +k8s.io/api v0.24.3/go.mod h1:elGR/XSZrS7z7cSZPzVWaycpJuGIw57j9b95/1PdJNI= k8s.io/apimachinery v0.17.8/go.mod h1:Lg8zZ5iC/O8UjCqW6DNhcQG2m4TdjF9kwG3891OWbbA= -k8s.io/apimachinery v0.23.0/go.mod h1:fFCTTBKvKcwTPFzjlcxp91uPFZr+JA0FubU4fLzzFYc= -k8s.io/apimachinery v0.23.3 h1:7IW6jxNzrXTsP0c8yXz2E5Yx/WTzVPTsHIx/2Vm0cIk= -k8s.io/apimachinery v0.23.3/go.mod h1:BEuFMMBaIbcOqVIJqNZJXGFTP4W6AycEpb5+m/97hrM= -k8s.io/apiserver v0.23.0/go.mod 
h1:Cec35u/9zAepDPPFyT+UMrgqOCjgJ5qtfVJDxjZYmt4= -k8s.io/apiserver v0.23.3/go.mod h1:3HhsTmC+Pn+Jctw+Ow0LHA4dQ4oXrQ4XJDzrVDG64T4= +k8s.io/apimachinery v0.20.1/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU= +k8s.io/apimachinery v0.20.4/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU= +k8s.io/apimachinery v0.20.6/go.mod h1:ejZXtW1Ra6V1O5H8xPBGz+T3+4gfkTCeExAHKU57MAc= +k8s.io/apimachinery v0.24.3 h1:hrFiNSA2cBZqllakVYyH/VyEh4B581bQRmqATJSeQTg= +k8s.io/apimachinery v0.24.3/go.mod h1:82Bi4sCzVBdpYjyI4jY6aHX+YCUchUIrZrXKedjd2UM= +k8s.io/apiserver v0.20.1/go.mod h1:ro5QHeQkgMS7ZGpvf4tSMx6bBOgPfE+f52KwvXfScaU= +k8s.io/apiserver v0.20.4/go.mod h1:Mc80thBKOyy7tbvFtB4kJv1kbdD0eIH8k8vianJcbFM= +k8s.io/apiserver v0.20.6/go.mod h1:QIJXNt6i6JB+0YQRNcS0hdRHJlMhflFmsBDeSgT1r8Q= k8s.io/client-go v0.17.8/go.mod h1:SJsDS64AAtt9VZyeaQMb4Ck5etCitZ/FwajWdzua5eY= -k8s.io/client-go v0.23.0/go.mod h1:hrDnpnK1mSr65lHHcUuIZIXDgEbzc7/683c6hyG4jTA= -k8s.io/client-go v0.23.3 h1:23QYUmCQ/W6hW78xIwm3XqZrrKZM+LWDqW2zfo+szJs= -k8s.io/client-go v0.23.3/go.mod h1:47oMd+YvAOqZM7pcQ6neJtBiFH7alOyfunYN48VsmwE= -k8s.io/code-generator v0.23.0/go.mod h1:vQvOhDXhuzqiVfM/YHp+dmg10WDZCchJVObc9MvowsE= -k8s.io/code-generator v0.23.3/go.mod h1:S0Q1JVA+kSzTI1oUvbKAxZY/DYbA/ZUb4Uknog12ETk= -k8s.io/component-base v0.23.0/go.mod h1:DHH5uiFvLC1edCpvcTDV++NKULdYYU6pR9Tt3HIKMKI= -k8s.io/component-base v0.23.3/go.mod h1:1Smc4C60rWG7d3HjSYpIwEbySQ3YWg0uzH5a2AtaTLg= +k8s.io/client-go v0.20.1/go.mod h1:/zcHdt1TeWSd5HoUe6elJmHSQ6uLLgp4bIJHVEuy+/Y= +k8s.io/client-go v0.20.4/go.mod h1:LiMv25ND1gLUdBeYxBIwKpkSC5IsozMMmOOeSJboP+k= +k8s.io/client-go v0.20.6/go.mod h1:nNQMnOvEUEsOzRRFIIkdmYOjAZrC8bgq0ExboWSU1I0= +k8s.io/client-go v0.24.3 h1:Nl1840+6p4JqkFWEW2LnMKU667BUxw03REfLAVhuKQY= +k8s.io/client-go v0.24.3/go.mod h1:AAovolf5Z9bY1wIg2FZ8LPQlEdKHjLI7ZD4rw920BJw= +k8s.io/component-base v0.20.1/go.mod h1:guxkoJnNoh8LNrbtiQOlyp2Y2XFCZQmrcg2n/DeYNLk= +k8s.io/component-base v0.20.4/go.mod 
h1:t4p9EdiagbVCJKrQ1RsA5/V4rFQNDfRlevJajlGwgjI= +k8s.io/component-base v0.20.6/go.mod h1:6f1MPBAeI+mvuts3sIdtpjljHWBQ2cIy38oBIWMYnrM= +k8s.io/cri-api v0.17.3/go.mod h1:X1sbHmuXhwaHs9xxYffLqJogVsnI+f6cPRcgPel7ywM= +k8s.io/cri-api v0.20.1/go.mod h1:2JRbKt+BFLTjtrILYVqQK5jqhI+XNdF6UiGMgczeBCI= +k8s.io/cri-api v0.20.4/go.mod h1:2JRbKt+BFLTjtrILYVqQK5jqhI+XNdF6UiGMgczeBCI= +k8s.io/cri-api v0.20.6/go.mod h1:ew44AjNXwyn1s0U4xCKGodU7J1HzBeZ1MpGrpa5r8Yc= k8s.io/gengo v0.0.0-20190128074634-0689ccc1d7d6/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= -k8s.io/gengo v0.0.0-20201203183100-97869a43a9d9/go.mod h1:FiNAH4ZV3gBg2Kwh89tzAEV2be7d5xI0vBa/VySYy3E= +k8s.io/gengo v0.0.0-20200413195148-3a45101e95ac/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= k8s.io/gengo v0.0.0-20210813121822-485abfe95c7c/go.mod h1:FiNAH4ZV3gBg2Kwh89tzAEV2be7d5xI0vBa/VySYy3E= -k8s.io/gengo v0.0.0-20211115164449-b448ea381d54/go.mod h1:FiNAH4ZV3gBg2Kwh89tzAEV2be7d5xI0vBa/VySYy3E= +k8s.io/gengo v0.0.0-20220613173612-397b4ae3bce7 h1:RGb68G3yotdQggcyenx9y0+lnVJCXXcLa6geXOMlf5o= +k8s.io/gengo v0.0.0-20220613173612-397b4ae3bce7/go.mod h1:FiNAH4ZV3gBg2Kwh89tzAEV2be7d5xI0vBa/VySYy3E= k8s.io/klog v0.0.0-20181102134211-b9b56d5dfc92/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= -k8s.io/klog v0.2.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= k8s.io/klog v0.3.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= -k8s.io/klog v1.0.0 h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8= k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I= k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE= k8s.io/klog/v2 v2.2.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= +k8s.io/klog/v2 v2.4.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= k8s.io/klog/v2 v2.5.0/go.mod h1:hy9LJ/NvuK+iVyP4Ehqva4HxZG/oXyIS3n3Jmire4Ec= -k8s.io/klog/v2 v2.30.0/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= -k8s.io/klog/v2 v2.40.1 
h1:P4RRucWk/lFOlDdkAr3mc7iWFkgKrZY9qZMAgek06S4= -k8s.io/klog/v2 v2.40.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= +k8s.io/klog/v2 v2.60.1 h1:VW25q3bZx9uE3vvdL6M8ezOX79vA2Aq1nEWLqNQclHc= +k8s.io/klog/v2 v2.60.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= k8s.io/kube-openapi v0.0.0-20200410145947-bcb3869e6f29/go.mod h1:F+5wygcW0wmRTnM3cOgIqGivxkwSWIWT5YdsDbeAOaU= -k8s.io/kube-openapi v0.0.0-20211115234752-e816edb12b65/go.mod h1:sX9MT8g7NVZM5lVL/j8QyCCJe8YSMW30QvGZWaCIDIk= -k8s.io/kube-openapi v0.0.0-20220124234850-424119656bbf h1:M9XBsiMslw2lb2ZzglC0TOkBPK5NQi0/noUrdnoFwUg= -k8s.io/kube-openapi v0.0.0-20220124234850-424119656bbf/go.mod h1:sX9MT8g7NVZM5lVL/j8QyCCJe8YSMW30QvGZWaCIDIk= +k8s.io/kube-openapi v0.0.0-20201113171705-d219536bb9fd/go.mod h1:WOJ3KddDSol4tAGcJo0Tvi+dK12EcqSLqcWsryKMpfM= +k8s.io/kube-openapi v0.0.0-20220328201542-3ee0da9b0b42/go.mod h1:Z/45zLw8lUo4wdiUkI+v/ImEGAvu3WatcZl3lPMR4Rk= +k8s.io/kube-openapi v0.0.0-20220627174259-011e075b9cb8 h1:yEQKdMCjzAOvGeiTwG4hO/hNVNtDOuUFvMUZ0OlaIzs= +k8s.io/kube-openapi v0.0.0-20220627174259-011e075b9cb8/go.mod h1:mbJ+NSUoAhuR14N0S63bPkh8MGVSo3VYSGZtH/mfMe0= +k8s.io/kubernetes v1.13.0/go.mod h1:ocZa8+6APFNC2tX1DZASIbocyYT5jHzqFVsY5aoB7Jk= k8s.io/utils v0.0.0-20191114184206-e782cd3c129f/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew= +k8s.io/utils v0.0.0-20201110183641-67b214c5f920/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= k8s.io/utils v0.0.0-20210802155522-efc7438f0176/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= -k8s.io/utils v0.0.0-20210930125809-cb0fa318a74b/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= -k8s.io/utils v0.0.0-20211116205334-6203023598ed/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= k8s.io/utils v0.0.0-20220210201930-3a6ce19ff2f9 h1:HNSDgDCrr/6Ly3WEGKZftiE7IY19Vz2GdbOCyI4qqhc= k8s.io/utils v0.0.0-20220210201930-3a6ce19ff2f9/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= moul.io/http2curl 
v1.0.1-0.20190925090545-5cd742060b0e h1:C7q+e9M5nggAvWfVg9Nl66kebKeuJlP3FD58V4RR5wo= moul.io/http2curl v1.0.1-0.20190925090545-5cd742060b0e/go.mod h1:nejbQVfXh96n9dSF6cH3Jsk/QI1Z2oEL7sSI2ifXFNA= +mvdan.cc/gofumpt v0.1.1/go.mod h1:yXG1r1WqZVKWbVRtBWKWX9+CxGYfA51nSomhM0woR48= +mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc= +mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4= +mvdan.cc/unparam v0.0.0-20210104141923-aac4ce9116a7/go.mod h1:hBpJkZE8H/sb+VRFvw2+rBpHNsTBcvSpk61hr8mzXZE= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= -sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.25/go.mod h1:Mlj9PNLmG9bZ6BHFwFKDo5afkpWyUISkb9Me0GnK66I= -sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.27/go.mod h1:tq2nT0Kx7W+/f2JVE+zxYtUhdjuELJkVpNz+x/QN5R4= -sigs.k8s.io/controller-runtime v0.11.1 h1:7YIHT2QnHJArj/dk9aUkYhfqfK5cIxPOX5gPECfdZLU= -sigs.k8s.io/controller-runtime v0.11.1/go.mod h1:KKwLiTooNGu+JmLZGn9Sl3Gjmfj66eMbCQznLP5zcqA= -sigs.k8s.io/controller-tools v0.8.0/go.mod h1:qE2DXhVOiEq5ijmINcFbqi9GZrrUjzB1TuJU0xa6eoY= -sigs.k8s.io/json v0.0.0-20211020170558-c049b76a60c6/go.mod h1:p4QtZmO4uMYipTQNzagwnNoseA6OxSUutVw05NhYDRs= +sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.14/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg= +sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.15/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg= sigs.k8s.io/json v0.0.0-20211208200746-9f7c6b3444d2 h1:kDi4JBNAsJWfz1aEXhO8Jg87JJaPNLh5tIzYHgStQ9Y= sigs.k8s.io/json v0.0.0-20211208200746-9f7c6b3444d2/go.mod h1:B+TnT182UBxE84DiCz4CVE26eOSDAeYCpfDnC2kdKMY= 
sigs.k8s.io/structured-merge-diff/v2 v2.0.1/go.mod h1:Wb7vfKAodbKgf6tn1Kl0VvGj7mRH6DGaRcixXEJXTsE= sigs.k8s.io/structured-merge-diff/v4 v4.0.2/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= -sigs.k8s.io/structured-merge-diff/v4 v4.1.2/go.mod h1:j/nl6xW8vLS49O8YvXW1ocPhZawJtm+Yrr7PPRQ0Vg4= -sigs.k8s.io/structured-merge-diff/v4 v4.2.0/go.mod h1:j/nl6xW8vLS49O8YvXW1ocPhZawJtm+Yrr7PPRQ0Vg4= +sigs.k8s.io/structured-merge-diff/v4 v4.0.3/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= sigs.k8s.io/structured-merge-diff/v4 v4.2.1 h1:bKCqE9GvQ5tiVHn5rfn1r+yao3aLQEaLzkkmAkf+A6Y= sigs.k8s.io/structured-merge-diff/v4 v4.2.1/go.mod h1:j/nl6xW8vLS49O8YvXW1ocPhZawJtm+Yrr7PPRQ0Vg4= sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= diff --git a/hack/access-token.sh b/hack/access-token.sh new file mode 100755 index 000000000000..cd99dfffa1a1 --- /dev/null +++ b/hack/access-token.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env sh +set -eu + +case $1 in + init) + kubectl delete role jenkins --ignore-not-found + kubectl create role jenkins --verb=create,list,watch --resource=workflows.argoproj.io + kubectl delete sa jenkins --ignore-not-found + kubectl create sa jenkins + kubectl delete rolebinding jenkins --ignore-not-found + kubectl create rolebinding jenkins --role=jenkins --serviceaccount=argo:jenkins + ;; + get) + SECRET=$(kubectl get sa jenkins -o=jsonpath='{.secrets[0].name}') + ARGO_TOKEN="Bearer $(kubectl get secret $SECRET -o=jsonpath='{.data.token}' | base64 --decode)" + + curl -s http://localhost:2746/api/v1/workflows/argo -H "Authorization: $ARGO_TOKEN" > /dev/null + + echo "$ARGO_TOKEN" + ;; + *) + exit 1 + ;; +esac diff --git a/hack/check-env-doc.sh b/hack/check-env-doc.sh index 9eb97c1568a5..bbb7b8fa1335 100755 --- a/hack/check-env-doc.sh +++ b/hack/check-env-doc.sh @@ -9,7 +9,7 @@ function check-used { var="${x%\`}"; var="${var#\`}"; if ! 
grep -qR --exclude="*_test.go" "$var" ./cmd/workflow-controller ./workflow ./persist ./util ./server ; then - echo "Documented variable $var in docs/environment-variables.md is not used anywhere"; + echo "❌ Documented variable $var in docs/environment-variables.md is not used anywhere" >&2; exit 1; fi; done @@ -24,10 +24,10 @@ function check-documented { var="${x%\"}"; var="${var#\"}"; if ! grep -q "$var" docs/environment-variables.md; then - echo "Variable $var not documented in docs/environment-variables.md"; + echo "❌ Variable $var not documented in docs/environment-variables.md" >&2; exit 1; fi; done } -check-used && check-documented && echo "Success!" +check-used && check-documented && echo "✅ Success - all environment variables appear to be documented" diff --git a/hack/check-mkdocs.sh b/hack/check-mkdocs.sh new file mode 100755 index 000000000000..046ef1df1494 --- /dev/null +++ b/hack/check-mkdocs.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env sh +set -eu + +echo "Checking all docs are listed in mkdocs.yml..."ß + +find docs -name '*.md' | grep -v "^docs/proposals" | sed 's|^docs/||' | while read -r f ; do + if ! 
grep -Fq "$f" mkdocs.yml; then + echo "❌ $f is missing from mkdocs.yml" >&2 + exit 1 + fi +done + +echo "✅ Success - all docs appear to be listed in mkdocs.yml" diff --git a/hack/docgen.go b/hack/docgen.go index 2f74870ade4b..3948424df7ff 100644 --- a/hack/docgen.go +++ b/hack/docgen.go @@ -307,7 +307,7 @@ func (c *DocGeneratorContext) getTemplate(key string) string { } var properties map[string]interface{} - def,ok := c.defs[key] + def, ok := c.defs[key] if !ok { return out } diff --git a/hack/git/hooks/commit-msg b/hack/git/hooks/commit-msg index ccd75d982661..bae720f594fc 100755 --- a/hack/git/hooks/commit-msg +++ b/hack/git/hooks/commit-msg @@ -2,12 +2,13 @@ set -eu grep -q 'Signed-off-by: ' "$1" || { - echo >&2 'Commits must be signed-off: https://probot.github.io/apps/dco/' + echo '❌Commits must be signed-off: https://probot.github.io/apps/dco/' >&2 exit 1 } grep -qE '^(?:build|feat|fix|docs|style|refactor|perf|test|ci|chore|revert)\(?(?:\w+|\s|\-|_)?\)?!?:\s\w+' "$1" || grep -q 'Merge' "$1" || { - echo >&2 'Commit message must be semantic: https://github.com/zeke/semantic-pull-requests' + echo >&2 '❌ Commit message must be semantic: https://github.com/zeke/semantic-pull-requests' >&2 exit 1 } +echo "✅ Commit looks good" \ No newline at end of file diff --git a/hack/git/hooks/pre-commit b/hack/git/hooks/pre-commit new file mode 100755 index 000000000000..2853f3e4caf9 --- /dev/null +++ b/hack/git/hooks/pre-commit @@ -0,0 +1,16 @@ +#!/bin/sh +set -eu + +m=30 + +if find dist/pre-commit -mmin -$m | grep .; then + echo "✅ \`make pre-commit -B\` was run in the last ${m}m" +else + echo "⚠️ \`make pre-commit -B\` has not been for over ${m}m" +fi + +if find dist/test -mmin -${m} | grep .; then + echo "✅ \`make test\` was run in the last ${m}m" +else + echo "⚠️️ \`make test\` has not been run for over ${m}m" +fi \ No newline at end of file diff --git a/hack/null_docgen.go b/hack/null_docgen.go index 588715d0e157..e897e9ad3f62 100644 --- 
a/hack/null_docgen.go +++ b/hack/null_docgen.go @@ -1,3 +1,4 @@ +//go:build fields // +build fields package main diff --git a/hack/port-forward.sh b/hack/port-forward.sh index 67ff2a207319..a1c9479603b2 100755 --- a/hack/port-forward.sh +++ b/hack/port-forward.sh @@ -20,6 +20,7 @@ wait-for() { wait-for minio pf minio 9000 +pf minio 9001 dex=$(kubectl -n argo get pod -l app=dex -o name) if [[ "$dex" != "" ]]; then @@ -56,3 +57,9 @@ if [[ "$(kubectl -n argo get pod -l app=prometheus -o name)" != "" ]]; then wait-for prometheus pf prometheus 9091 9090 fi + +azurite=$(kubectl -n argo get pod -l app=azurite -o name) +if [[ "$azurite" != "" ]]; then + wait-for azurite + pf azurite 10000 +fi diff --git a/hack/release-notes.md b/hack/release-notes.md index 7de13490f772..876da8c8ae97 100644 --- a/hack/release-notes.md +++ b/hack/release-notes.md @@ -6,7 +6,7 @@ Find out on [our blog](https://blog.argoproj.io) and [changelog](https://github. ## Breaking Changes and Known Issues -See [upgrading](https://github.com/argoproj/argo-workflows/blob/master/docs/upgrading.md). +Can be found in the [installation guide](https://argoproj.github.io/argo-workflows/installation/). 
## Installation @@ -16,7 +16,7 @@ See [upgrading](https://github.com/argoproj/argo-workflows/blob/master/docs/upgr Available via `curl` -```sh +```bash # Download the binary curl -sLO https://github.com/argoproj/argo-workflows/releases/download/${version}/argo-darwin-amd64.gz @@ -37,7 +37,7 @@ argo version Available via `curl` -```sh +```bash # Download the binary curl -sLO https://github.com/argoproj/argo-workflows/releases/download/${version}/argo-linux-amd64.gz @@ -56,7 +56,7 @@ argo version ### Controller and Server -```sh +```bash kubectl create namespace argo kubectl apply -n argo -f https://github.com/argoproj/argo-workflows/releases/download/${version}/install.yaml ``` diff --git a/hack/test-examples.sh b/hack/test-examples.sh index 62cb4acad5b5..4b46fdc52a40 100755 --- a/hack/test-examples.sh +++ b/hack/test-examples.sh @@ -3,8 +3,6 @@ set -eu -o pipefail # Load the configmaps that contains the parameter values used for certain examples. kubectl apply -f examples/configmaps/simple-parameters-configmap.yaml -# Needed for examples/selected-executor-workflow.yaml. -kubectl apply -f manifests/quick-start/base/executor/pns/executor-role.yaml echo "Checking for banned images..." grep -lR 'workflows.argoproj.io/test' examples/* | while read f ; do @@ -12,6 +10,8 @@ grep -lR 'workflows.argoproj.io/test' examples/* | while read f ; do test 0 == $(grep -o 'image: .*' $f | grep -cv 'argoproj/argosay:v2\|python:alpine3.6') done +trap 'kubectl get wf' EXIT + grep -lR 'workflows.argoproj.io/test' examples/* | while read f ; do kubectl delete workflow -l workflows.argoproj.io/test echo "Running $f..." 
diff --git a/hack/what-to-cherry-pick.sh b/hack/what-to-cherry-pick.sh new file mode 100755 index 000000000000..fd1ee0883f27 --- /dev/null +++ b/hack/what-to-cherry-pick.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env sh +set -eu +# this script prints out a list a commits that are on master that you should probably cherry-pick to the release branch + +br=$1;# branch + +# find the branch point +base=$(git merge-base $br master) + +# extract the PRs from stdin +prNo() { + set -eu + cat | sed 's|.*(\(#[0-9]*\))|\1|' +} + +# list the PRs on each branch +prs() { + set -eu + git log --format=%s --grep '^fix:.*(#' $1...$2 | prNo | sort > /tmp/$2 +} + +prs $base $br +prs $base master + +# find PRs added to master +diff /tmp/$br /tmp/master | grep '^> ' | cut -c 3- > /tmp/prs + +# print all the commits that need cherry-picking +git log --oneline --grep '^fix:.*(#' $base...master | while read -r m; do + grep -q "$(echo $m | prNo)" /tmp/prs && echo $m +done \ No newline at end of file diff --git a/manifests/base/argo-server/argo-server-deployment.yaml b/manifests/base/argo-server/argo-server-deployment.yaml index 8956020e9ee2..e3672fbe16a2 100644 --- a/manifests/base/argo-server/argo-server-deployment.yaml +++ b/manifests/base/argo-server/argo-server-deployment.yaml @@ -16,6 +16,9 @@ spec: - name: argo-server image: quay.io/argoproj/argocli:latest securityContext: + readOnlyRootFilesystem: true + runAsNonRoot: true + allowPrivilegeEscalation: false capabilities: drop: - ALL diff --git a/manifests/base/crds/full/argoproj.io_clusterworkflowtemplates.yaml b/manifests/base/crds/full/argoproj.io_clusterworkflowtemplates.yaml index 0e6fd5a25fbe..304819ab9df8 100644 --- a/manifests/base/crds/full/argoproj.io_clusterworkflowtemplates.yaml +++ b/manifests/base/crds/full/argoproj.io_clusterworkflowtemplates.yaml @@ -406,6 +406,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: 
object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -435,6 +458,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -461,6 +512,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -487,6 +540,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -570,6 +625,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - 
key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -773,6 +932,29 @@ spec: type: object type: array type: object + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactRepositoryRef: properties: configMap: @@ -834,6 +1016,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -863,6 +1068,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -889,6 +1122,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -915,6 +1150,8 @@ spec: type: string revision: type: string + singleBranch: + type: 
boolean sshPrivateKeySecret: properties: key: @@ -998,85 +1235,189 @@ spec: type: object http: properties: - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: + auth: properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + 
properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: + - key + type: object + path: + type: string + raw: + properties: + data: + type: string + required: + - data + type: object recurseMode: type: boolean s3: @@ -1214,8 +1555,6 @@ spec: template: type: string type: object - required: - - template type: object type: object hostAliases: @@ -1909,6 +2248,32 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + 
type: boolean + required: + - blob + - container + - endpoint + type: object gcs: properties: bucket: @@ -1931,6 +2296,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -1957,6 +2324,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -2038,70 +2407,174 @@ spec: type: object http: properties: - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: + auth: properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - required: - - key - type: object - raw: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + 
type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: + - key + type: object + raw: properties: data: type: string @@ -3419,6 +3892,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -3448,6 +3944,34 @@ spec: required: - 
url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -3474,6 +3998,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -3500,6 +4026,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -3583,74 +4111,178 @@ spec: type: object http: properties: - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: + auth: properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - required: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + 
properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: - key type: object path: @@ -3822,6 +4454,29 @@ spec: type: object archiveLogs: type: boolean + 
artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -3851,6 +4506,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -3877,6 +4560,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -3903,6 +4588,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -3986,6 +4673,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object 
+ clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -4202,8 +4993,6 @@ spec: template: type: string type: object - required: - - template type: object type: object inline: {} @@ -4278,6 +5067,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -4307,6 +5119,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -4333,6 +5173,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -4359,6 +5201,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -4442,26 +5286,130 @@ spec: type: object http: properties: - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - 
value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer name: type: string optional: @@ -4628,6 +5576,12 @@ spec: properties: body: type: string + bodyFrom: + properties: + bytes: + format: byte + type: string + type: object headers: items: properties: @@ -5276,6 +6230,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + 
podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -5305,6 +6282,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -5331,6 +6336,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -5357,6 +6364,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -5440,6 +6449,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + 
properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -5767,6 +6880,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -5796,9 +6932,37 @@ spec: required: - url type: object - from: - type: string - fromExpression: + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: + type: string + fromExpression: type: string gcs: properties: @@ -5822,6 +6986,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -5848,6 +7014,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -5931,6 +7099,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: 
object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -6162,247 +7434,742 @@ spec: type: array manifest: type: string - mergeStrategy: - type: string - setOwnerReference: - type: boolean - successCondition: - type: string - required: - - action - type: object - retryStrategy: - properties: - affinity: - properties: - nodeAntiAffinity: - type: object - type: object - backoff: - properties: - duration: - type: string - factor: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - maxDuration: - type: string - type: object - expression: - type: string - limit: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - retryPolicy: - type: string - type: object - schedulerName: - type: string - script: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: 
- items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: + manifestFrom: properties: - postStart: + artifact: properties: - exec: + archive: properties: - command: - items: - type: string - type: array + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object type: object - httpGet: + archiveLogs: + type: boolean + artifactGC: properties: - host: - type: string - httpHeaders: - items: - properties: - name: + podMetadata: + properties: + annotations: + additionalProperties: type: string - value: + type: object + labels: + additionalProperties: type: string - required: - - name - - value - type: object - type: array - path: + type: object + type: object + serviceAccountName: type: string - port: - anyOf: - - 
type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object + artifactory: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object required: - - port + - url type: object - tcpSocket: + azure: properties: - host: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean required: - - port + - blob + - container + - endpoint type: object - type: object - preStop: - properties: - exec: + deleted: + type: boolean + from: + type: string + fromExpression: + type: string + gcs: properties: - command: - items: - type: string - type: array + bucket: + type: string + key: + type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key type: object - httpGet: + git: properties: - host: + branch: type: string - httpHeaders: + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object + type: string type: array - path: + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: type: string - port: - anyOf: - - type: integer - - type: string 
- x-kubernetes-int-or-string: true - scheme: + revision: type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object required: - - port + - repo type: object - tcpSocket: + globalName: + type: string + hdfs: properties: - host: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true required: - - port + - path type: object - type: object - type: object - livenessProbe: - properties: - exec: + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + 
- key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: + - key + type: object + path: + type: string + raw: + properties: + data: + type: string + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + 
properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + endpoint: + type: string + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + useSDKCreds: + type: boolean + type: object + subPath: + type: string + required: + - name + type: object + required: + - artifact + type: object + mergeStrategy: + type: string + setOwnerReference: + type: boolean + successCondition: + type: string + required: + - action + type: object + retryStrategy: + properties: + affinity: + properties: + nodeAntiAffinity: + type: object + type: object + backoff: + properties: + duration: + type: string + factor: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + maxDuration: + type: string + type: object + expression: + type: string + limit: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + retryPolicy: + type: string + type: object + schedulerName: + type: string + script: + properties: + args: + items: + type: string + type: array + command: + items: + type: string + type: array + env: + items: + properties: + name: + type: string + value: + type: string + valueFrom: + properties: + configMapKeyRef: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + fieldRef: + properties: + apiVersion: + type: string + fieldPath: + type: string + required: + - fieldPath + type: object + resourceFieldRef: + properties: + containerName: + type: string + divisor: + anyOf: + - type: 
integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + type: string + required: + - resource + type: object + secretKeyRef: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + required: + - name + type: object + type: array + envFrom: + items: + properties: + configMapRef: + properties: + name: + type: string + optional: + type: boolean + type: object + prefix: + type: string + secretRef: + properties: + name: + type: string + optional: + type: boolean + type: object + type: object + type: array + image: + type: string + imagePullPolicy: + type: string + lifecycle: + properties: + postStart: + properties: + exec: + properties: + command: + items: + type: string + type: array + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + type: object + preStop: + properties: + exec: + properties: + command: + items: + type: string + type: array + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + tcpSocket: + properties: + host: + type: 
string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + type: object + type: object + livenessProbe: + properties: + exec: properties: command: items: @@ -8570,6 +10337,32 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object gcs: properties: bucket: @@ -8592,6 +10385,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -8618,6 +10413,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -8699,6 +10496,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: 
string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -10080,6 +11981,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -10109,6 +12033,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -10135,6 +12087,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -10161,6 +12115,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -10244,6 +12200,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: 
string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -10483,6 +12543,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -10510,8 +12593,36 @@ spec: - key type: object required: - - url + - url + type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint type: object + deleted: + type: boolean from: type: string fromExpression: @@ -10538,6 +12649,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: 
int64 type: integer @@ -10564,6 +12677,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -10647,6 +12762,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -10863,8 +13082,6 @@ spec: template: type: string type: object - required: - - template type: object type: object inline: {} @@ -10939,6 +13156,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + 
serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -10968,6 +13208,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -10994,6 +13262,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -11020,6 +13290,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -11031,7 +13303,237 @@ spec: required: - key type: object - usernameSecret: + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo + type: object + globalName: + type: string + hdfs: + properties: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path + type: object + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + 
properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object 
+ secretKeySecret: properties: key: type: string @@ -11042,22 +13544,25 @@ spec: required: - key type: object + securityToken: + type: string required: - - repo + - key type: object - globalName: + path: type: string - hdfs: + raw: properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: + data: type: string - krbCCacheSecret: + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: properties: key: type: string @@ -11068,18 +13573,44 @@ spec: required: - key type: object - krbConfigConfigMap: + bucket: + type: string + createBucketIfNotPresent: properties: - key: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: type: string - name: + kmsKeyId: type: string - optional: - type: boolean - required: - - key + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object type: object - krbKeytabSecret: + endpoint: + type: string + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: properties: key: type: string @@ -11090,46 +13621,116 @@ spec: required: - key type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url + useSDKCreds: + type: boolean type: object - mode: - format: int32 - type: integer + subPath: + type: string + required: + - name + type: object + type: object + transformation: + items: + properties: + expression: + type: string + required: + - expression + type: object + type: array + required: + - source + 
- transformation + type: object + executor: + properties: + serviceAccountName: + type: string + type: object + failFast: + type: boolean + hostAliases: + items: + properties: + hostnames: + items: + type: string + type: array + ip: + type: string + type: object + type: array + http: + properties: + body: + type: string + bodyFrom: + properties: + bytes: + format: byte + type: string + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + valueFrom: + properties: + secretKeyRef: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + required: + - name + type: object + type: array + insecureSkipVerify: + type: boolean + method: + type: string + successCondition: + type: string + timeoutSeconds: + format: int64 + type: integer + url: + type: string + required: + - url + type: object + initContainers: + items: + properties: + args: + items: + type: string + type: array + command: + items: + type: string + type: array + env: + items: + properties: name: type: string - optional: - type: boolean - oss: + value: + type: string + valueFrom: properties: - accessKeySecret: + configMapKeyRef: properties: key: type: string @@ -11140,24 +13741,31 @@ spec: required: - key type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: + fieldRef: properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer + apiVersion: + type: string + fieldPath: + type: string + required: + - fieldPath + type: object + resourceFieldRef: + properties: + containerName: + type: string + divisor: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + type: 
string + required: + - resource type: object - secretKeySecret: + secretKeyRef: properties: key: type: string @@ -11168,137 +13776,745 @@ spec: required: - key type: object - securityToken: + type: object + required: + - name + type: object + type: array + envFrom: + items: + properties: + configMapRef: + properties: + name: type: string - required: - - key + optional: + type: boolean type: object - path: + prefix: type: string - raw: + secretRef: properties: - data: + name: type: string - required: - - data + optional: + type: boolean type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: + type: object + type: array + image: + type: string + imagePullPolicy: + type: string + lifecycle: + properties: + postStart: + properties: + exec: + properties: + command: + items: + type: string + type: array + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + type: object + preStop: + properties: + exec: + properties: + command: + items: type: string + type: array + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string 
+ x-kubernetes-int-or-string: true + required: + - port + type: object + type: object + type: object + livenessProbe: + properties: + exec: + properties: + command: + items: + type: string + type: array + type: object + failureThreshold: + format: int32 + type: integer + grpc: + properties: + port: + format: int32 + type: integer + service: + type: string + required: + - port + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: name: type: string - optional: - type: boolean + value: + type: string required: - - key + - name + - value type: object - bucket: + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + initialDelaySeconds: + format: int32 + type: integer + periodSeconds: + format: int32 + type: integer + successThreshold: + format: int32 + type: integer + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + terminationGracePeriodSeconds: + format: int64 + type: integer + timeoutSeconds: + format: int32 + type: integer + type: object + mirrorVolumeMounts: + type: boolean + name: + type: string + ports: + items: + properties: + containerPort: + format: int32 + type: integer + hostIP: + type: string + hostPort: + format: int32 + type: integer + name: + type: string + protocol: + default: TCP + type: string + required: + - containerPort + type: object + type: array + x-kubernetes-list-map-keys: + - containerPort + - protocol + x-kubernetes-list-type: map + readinessProbe: + properties: + exec: + properties: + command: + items: type: string - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: + type: array + type: object + failureThreshold: + format: int32 + type: integer + grpc: + properties: + port: + format: 
int32 + type: integer + service: + type: string + required: + - port + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: properties: - enableEncryption: - type: boolean - kmsEncryptionContext: + name: type: string - kmsKeyId: + value: type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object + required: + - name + - value type: object - endpoint: - type: string - insecure: - type: boolean - key: + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + initialDelaySeconds: + format: int32 + type: integer + periodSeconds: + format: int32 + type: integer + successThreshold: + format: int32 + type: integer + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + terminationGracePeriodSeconds: + format: int64 + type: integer + timeoutSeconds: + format: int32 + type: integer + type: object + resources: + properties: + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: object + type: object + securityContext: + properties: + allowPrivilegeEscalation: + type: boolean + capabilities: + properties: + add: + items: type: string - region: + type: array + drop: + items: type: string - roleARN: + type: array + type: object + privileged: + type: boolean + 
procMount: + type: string + readOnlyRootFilesystem: + type: boolean + runAsGroup: + format: int64 + type: integer + runAsNonRoot: + type: boolean + runAsUser: + format: int64 + type: integer + seLinuxOptions: + properties: + level: + type: string + role: + type: string + type: + type: string + user: + type: string + type: object + seccompProfile: + properties: + localhostProfile: + type: string + type: + type: string + required: + - type + type: object + windowsOptions: + properties: + gmsaCredentialSpec: + type: string + gmsaCredentialSpecName: + type: string + hostProcess: + type: boolean + runAsUserName: + type: string + type: object + type: object + startupProbe: + properties: + exec: + properties: + command: + items: type: string - secretKeySecret: + type: array + type: object + failureThreshold: + format: int32 + type: integer + grpc: + properties: + port: + format: int32 + type: integer + service: + type: string + required: + - port + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: properties: - key: - type: string name: type: string - optional: - type: boolean + value: + type: string required: - - key + - name + - value type: object - useSDKCreds: - type: boolean - type: object - subPath: + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + initialDelaySeconds: + format: int32 + type: integer + periodSeconds: + format: int32 + type: integer + successThreshold: + format: int32 + type: integer + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + terminationGracePeriodSeconds: + format: int64 + type: integer + timeoutSeconds: + format: int32 + type: integer + type: object + stdin: + type: boolean + stdinOnce: + type: boolean + terminationMessagePath: + type: string + 
terminationMessagePolicy: + type: string + tty: + type: boolean + volumeDevices: + items: + properties: + devicePath: + type: string + name: type: string required: + - devicePath - name type: object - type: object - transformation: - items: - properties: - expression: - type: string - required: - - expression - type: object - type: array - required: - - source - - transformation - type: object - executor: - properties: - serviceAccountName: - type: string - type: object - failFast: - type: boolean - hostAliases: - items: - properties: - hostnames: + type: array + volumeMounts: items: - type: string + properties: + mountPath: + type: string + mountPropagation: + type: string + name: + type: string + readOnly: + type: boolean + subPath: + type: string + subPathExpr: + type: string + required: + - mountPath + - name + type: object type: array - ip: + workingDir: type: string + required: + - name type: object type: array - http: + inputs: properties: - body: - type: string - headers: + artifacts: items: properties: - name: + archive: + properties: + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object + type: object + archiveLogs: + type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object + artifactory: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: + type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - url + type: object + azure: + properties: + 
accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: type: string - value: + fromExpression: type: string - valueFrom: + gcs: properties: - secretKeyRef: + bucket: + type: string + key: + type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key + type: object + git: + properties: + branch: + type: string + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: + items: + type: string + type: array + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: + type: string + revision: + type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo + type: object + globalName: + type: string + hdfs: + properties: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: properties: key: type: string @@ -11309,515 +14525,774 @@ spec: required: - key type: object + krbRealm: + 
type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path type: object - required: - - name - type: object - type: array - insecureSkipVerify: - type: boolean - method: - type: string - successCondition: - type: string - timeoutSeconds: - format: int64 - type: integer - url: - type: string - required: - - url - type: object - initContainers: - items: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + 
type: object + headers: + items: properties: - key: - type: string name: type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: + value: type: string - optional: - type: boolean required: - - key + - name + - value type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: + accessKeySecret: properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: + key: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + name: type: string + optional: + type: boolean required: - - port + - key type: object - tcpSocket: + bucket: + type: string + createBucketIfNotPresent: + type: boolean + 
endpoint: + type: string + key: + type: string + lifecycleRule: properties: - host: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true + name: + type: string + optional: + type: boolean required: - - port + - key type: object + securityToken: + type: string + required: + - key type: object - preStop: + path: + type: string + raw: properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: + data: + type: string + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: + key: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + name: type: string + optional: + type: boolean required: - - port + - key type: object - tcpSocket: + bucket: + type: string + createBucketIfNotPresent: properties: - host: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - 
service: + endpoint: type: string - required: - - port - type: object - httpGet: - properties: - host: + insecure: + type: boolean + key: type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: + region: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + roleARN: type: string - required: - - port + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + useSDKCreds: + type: boolean type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: + subPath: + type: string + required: + - name + type: object + type: array + parameters: + items: + properties: + default: + type: string + description: + type: string + enum: + items: + type: string + type: array + globalName: + type: string + name: + type: string + value: + type: string + valueFrom: properties: - host: + configMapKeyRef: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + default: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port + event: + type: string + expression: + type: string + jqFilter: + type: string + jsonPath: + type: string + parameter: + type: string + path: + type: string + supplied: + type: object type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer + required: + - name type: object - mirrorVolumeMounts: - type: boolean - name: - type: string - ports: - items: + type: array + type: object + memoize: + properties: + cache: + properties: + configMap: properties: - containerPort: - format: int32 - type: integer - 
hostIP: + key: type: string - hostPort: - format: int32 - type: integer name: type: string - protocol: - default: TCP - type: string + optional: + type: boolean required: - - containerPort + - key type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: + required: + - configMap + type: object + key: + type: string + maxAge: + type: string + required: + - cache + - key + - maxAge + type: object + metadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + metrics: + properties: + prometheus: + items: properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: + counter: properties: - port: - format: int32 - type: integer - service: + value: type: string required: - - port + - value type: object - httpGet: + gauge: properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + realtime: + type: boolean + value: type: string required: - - port + - realtime + - value type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: + help: + type: string + histogram: properties: - host: + buckets: + items: + type: number + type: array + value: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - 
limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true + - buckets + - value type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: + labels: + items: + properties: + key: type: string - type: array - drop: - items: + value: type: string - type: array - type: object - privileged: - type: boolean - procMount: + required: + - key + - value + type: object + type: array + name: type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: + when: + type: string + required: + - help + - name + type: object + type: array + required: + - prometheus + type: object + name: + type: string + nodeSelector: + additionalProperties: + type: string + type: object + outputs: + properties: + artifacts: + items: + properties: + archive: + properties: + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object + type: object + archiveLogs: type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: + artifactGC: properties: - level: - type: string - role: - type: string - type: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: type: string - user: + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never type: string type: object - seccompProfile: 
+ artifactory: properties: - localhostProfile: - type: string - type: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object required: - - type + - url type: object - windowsOptions: + azure: properties: - gmsaCredentialSpec: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: type: string - gmsaCredentialSpecName: + container: type: string - hostProcess: + endpoint: + type: string + useSDKCreds: type: boolean - runAsUserName: + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: + type: string + fromExpression: + type: string + gcs: + properties: + bucket: + type: string + key: type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key type: object - type: object - startupProbe: - properties: - exec: + git: properties: - command: + branch: + type: string + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: items: type: string type: array + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: + type: string + revision: + type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo type: object - failureThreshold: - 
format: int32 - type: integer - grpc: + globalName: + type: string + hdfs: properties: - port: - format: int32 - type: integer - service: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: type: string required: - - port + - path type: object - httpGet: + http: properties: - host: - type: string - httpHeaders: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + 
type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: items: properties: name: @@ -11829,117 +15304,21 @@ spec: - value type: object type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: + url: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true required: - - port + - url type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: + mode: format: int32 type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - inputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: + name: + type: string + optional: type: boolean - artifactory: + oss: properties: - passwordSecret: + 
accessKeySecret: properties: key: type: string @@ -11950,9 +15329,24 @@ spec: required: - key type: object - url: + bucket: type: string - usernameSecret: + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: properties: key: type: string @@ -11963,20 +15357,25 @@ spec: required: - key type: object + securityToken: + type: string required: - - url + - key type: object - from: - type: string - fromExpression: + path: type: string - gcs: + raw: properties: - bucket: - type: string - key: + data: type: string - serviceAccountKeySecret: + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: properties: key: type: string @@ -11987,23 +15386,44 @@ spec: required: - key type: object - required: - - key - type: object - git: - properties: - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: + bucket: + type: string + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + endpoint: + type: string + insecure: type: boolean - passwordSecret: + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: properties: key: type: string @@ -12014,11 +15434,37 @@ spec: required: - key type: object - repo: - type: string - revision: - type: string - sshPrivateKeySecret: + useSDKCreds: + type: boolean + type: object + subPath: + type: string + required: 
+ - name + type: object + type: array + exitCode: + type: string + parameters: + items: + properties: + default: + type: string + description: + type: string + enum: + items: + type: string + type: array + globalName: + type: string + name: + type: string + value: + type: string + valueFrom: + properties: + configMapKeyRef: properties: key: type: string @@ -12029,800 +15475,549 @@ spec: required: - key type: object - usernameSecret: - properties: - key: + default: + type: string + event: + type: string + expression: + type: string + jqFilter: + type: string + jsonPath: + type: string + parameter: + type: string + path: + type: string + supplied: + type: object + type: object + required: + - name + type: object + type: array + result: + type: string + type: object + parallelism: + format: int64 + type: integer + plugin: + type: object + podSpecPatch: + type: string + priority: + format: int32 + type: integer + priorityClassName: + type: string + resource: + properties: + action: + type: string + failureCondition: + type: string + flags: + items: + type: string + type: array + manifest: + type: string + manifestFrom: + properties: + artifact: + properties: + archive: + properties: + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object + type: object + archiveLogs: + type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object + artifactory: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: + type: string + usernameSecret: + properties: + key: + type: string + name: + type: 
string + optional: + type: boolean + required: + - key + type: object + required: + - url + type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: + type: string + fromExpression: + type: string + gcs: + properties: + bucket: + type: string + key: + type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key + type: object + git: + properties: + branch: + type: string + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: + items: type: string - name: + type: array + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: + type: string + revision: + type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo + type: object + globalName: + type: string + hdfs: + properties: + addresses: + items: type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: + type: array + force: + type: boolean + hdfsUser: type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: 
string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - headers: - items: + krbCCacheSecret: properties: + key: + type: string name: type: string - value: + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: type: string + optional: + type: boolean required: - - name - - value + - key type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - 
createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path + type: object + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: properties: - key: - type: string name: type: string - optional: - 
type: boolean + value: + type: string required: - - key + - name + - value type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - memoize: - properties: - cache: - properties: - configMap: - properties: - key: - type: string + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer name: type: string optional: type: boolean - required: - - key - type: object - required: - - configMap - type: object - key: - type: string - maxAge: - type: string - required: - - cache - - key - - maxAge - type: object - metadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - metrics: - properties: - prometheus: - items: - properties: - counter: - properties: - value: - type: string - required: - - value - type: object - gauge: - properties: - realtime: - type: boolean - value: - type: string - 
required: - - realtime - - value - type: object - help: - type: string - histogram: - properties: - buckets: - items: - type: number - type: array - value: - type: string - required: - - buckets - - value - type: object - labels: - items: + oss: properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string key: type: string - value: + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: type: string required: - key - - value type: object - type: array - name: - type: string - when: - type: string - required: - - help - - name - type: object - type: array - required: - - prometheus - type: object - name: - type: string - nodeSelector: - additionalProperties: - type: string - type: object - outputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - 
optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: + path: + type: string + raw: + properties: + data: type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - headers: - items: + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + 
enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + endpoint: + type: string + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: properties: - name: + key: type: string - value: + name: type: string + optional: + type: boolean required: - - name - - value + - key type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - 
name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - exitCode: - type: string - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: + useSDKCreds: + type: boolean + type: object + subPath: type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - result: - type: string - type: object - parallelism: - format: int64 - type: integer - plugin: - type: object - podSpecPatch: - type: string - priority: - format: int32 - type: integer - priorityClassName: - type: string - resource: - properties: - action: - type: string - failureCondition: - type: string - flags: - items: - type: string - type: array - manifest: - type: string + required: + - name + type: object + required: + - artifact + type: object mergeStrategy: type: string setOwnerReference: diff --git a/manifests/base/crds/full/argoproj.io_cronworkflows.yaml b/manifests/base/crds/full/argoproj.io_cronworkflows.yaml index b72876920f16..db6d4c5c741d 100644 --- a/manifests/base/crds/full/argoproj.io_cronworkflows.yaml +++ 
b/manifests/base/crds/full/argoproj.io_cronworkflows.yaml @@ -427,6 +427,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -456,6 +479,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -482,6 +533,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -508,6 +561,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -591,6 +646,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + 
properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -794,6 +953,29 @@ spec: type: object type: array type: object + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactRepositoryRef: properties: configMap: @@ -855,6 +1037,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -884,6 +1089,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: 
boolean from: type: string fromExpression: @@ -910,6 +1143,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -936,6 +1171,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -1019,85 +1256,189 @@ spec: type: object http: properties: - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: + auth: properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: 
boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: + - key + type: object + path: + type: string + raw: + properties: + data: + type: string + required: + - data + type: object recurseMode: type: boolean s3: @@ -1235,8 +1576,6 @@ spec: template: type: string type: object - required: - - template type: object type: object hostAliases: @@ -1930,6 +2269,32 @@ spec: required: - url type: object + azure: + 
properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object gcs: properties: bucket: @@ -1952,6 +2317,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -1978,6 +2345,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -2059,70 +2428,174 @@ spec: type: object http: properties: - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: + auth: properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - required: - - key - type: object - raw: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + 
required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: + - key + type: object + raw: properties: data: type: string @@ -3440,6 +3913,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: 
object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -3469,6 +3965,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -3495,6 +4019,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -3521,6 +4047,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -3604,74 +4132,178 @@ spec: type: object http: properties: - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: + auth: properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - required: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + 
name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + 
key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: - key type: object path: @@ -3843,6 +4475,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -3872,6 +4527,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -3898,6 +4581,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -3924,6 +4609,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -4007,6 +4694,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: 
string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -4223,8 +5014,6 @@ spec: template: type: string type: object - required: - - template type: object type: object inline: {} @@ -4299,6 +5088,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -4328,6 +5140,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -4354,6 +5194,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -4380,6 +5222,8 @@ spec: type: string revision: type: 
string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -4463,26 +5307,130 @@ spec: type: object http: properties: - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer name: type: string optional: @@ -4649,6 +5597,12 @@ 
spec: properties: body: type: string + bodyFrom: + properties: + bytes: + format: byte + type: string + type: object headers: items: properties: @@ -5297,6 +6251,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -5326,6 +6303,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -5352,6 +6357,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -5378,6 +6385,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -5461,6 +6470,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: 
boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -5788,6 +6901,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -5817,9 +6953,37 @@ spec: required: - url type: object - from: - type: string - fromExpression: + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: + type: string + fromExpression: type: string gcs: properties: @@ -5843,6 +7007,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -5869,6 +7035,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: 
key: @@ -5952,6 +7120,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -6183,247 +7455,742 @@ spec: type: array manifest: type: string - mergeStrategy: - type: string - setOwnerReference: - type: boolean - successCondition: - type: string - required: - - action - type: object - retryStrategy: - properties: - affinity: - properties: - nodeAntiAffinity: - type: object - type: object - backoff: - properties: - duration: - type: string - factor: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - maxDuration: - type: string - type: object - expression: - type: string - limit: - anyOf: - - type: integer - - 
type: string - x-kubernetes-int-or-string: true - retryPolicy: - type: string - type: object - schedulerName: - type: string - script: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: + manifestFrom: properties: - postStart: + artifact: properties: - exec: + archive: properties: - command: - items: - type: string - type: array + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object type: object - httpGet: + archiveLogs: + type: boolean + artifactGC: properties: - host: - type: string - httpHeaders: - items: - properties: - name: + podMetadata: + properties: + annotations: + 
additionalProperties: type: string - value: + type: object + labels: + additionalProperties: type: string - required: - - name - - value - type: object - type: array - path: + type: object + type: object + serviceAccountName: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object + artifactory: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object required: - - port + - url type: object - tcpSocket: + azure: properties: - host: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean required: - - port + - blob + - container + - endpoint type: object - type: object - preStop: - properties: - exec: + deleted: + type: boolean + from: + type: string + fromExpression: + type: string + gcs: properties: - command: - items: - type: string - type: array + bucket: + type: string + key: + type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key type: object - httpGet: + git: properties: - host: + branch: type: string - httpHeaders: + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object + type: string type: array - 
path: + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + revision: type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object required: - - port + - repo type: object - tcpSocket: + globalName: + type: string + hdfs: properties: - host: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true required: - - port + - path type: object - type: object - type: object - livenessProbe: - properties: - exec: + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + 
clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: + - key + type: object + path: + type: string + raw: + properties: + data: + type: string + required: + - data + type: 
object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + endpoint: + type: string + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + useSDKCreds: + type: boolean + type: object + subPath: + type: string + required: + - name + type: object + required: + - artifact + type: object + mergeStrategy: + type: string + setOwnerReference: + type: boolean + successCondition: + type: string + required: + - action + type: object + retryStrategy: + properties: + affinity: + properties: + nodeAntiAffinity: + type: object + type: object + backoff: + properties: + duration: + type: string + factor: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + maxDuration: + type: string + type: object + expression: + type: string + limit: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + retryPolicy: + type: string + type: object + schedulerName: + type: string + script: + properties: + args: + items: + type: string + type: array + command: + items: + type: string + type: array + env: + items: + properties: + name: + type: string + value: + type: string + valueFrom: + properties: + configMapKeyRef: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + 
required: + - key + type: object + fieldRef: + properties: + apiVersion: + type: string + fieldPath: + type: string + required: + - fieldPath + type: object + resourceFieldRef: + properties: + containerName: + type: string + divisor: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + type: string + required: + - resource + type: object + secretKeyRef: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + required: + - name + type: object + type: array + envFrom: + items: + properties: + configMapRef: + properties: + name: + type: string + optional: + type: boolean + type: object + prefix: + type: string + secretRef: + properties: + name: + type: string + optional: + type: boolean + type: object + type: object + type: array + image: + type: string + imagePullPolicy: + type: string + lifecycle: + properties: + postStart: + properties: + exec: + properties: + command: + items: + type: string + type: array + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + type: object + preStop: + properties: + exec: + properties: + command: + items: + type: string + type: array + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: + name: + type: string + value: + type: string + required: + - name 
+ - value + type: object + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + type: object + type: object + livenessProbe: + properties: + exec: properties: command: items: @@ -8591,6 +10358,32 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object gcs: properties: bucket: @@ -8613,6 +10406,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -8639,6 +10434,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -8720,6 +10517,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: 
+ type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -10101,6 +12002,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -10130,6 +12054,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -10156,6 +12108,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -10182,6 +12136,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -10265,6 +12221,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + 
type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -10504,6 +12564,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -10531,8 +12614,36 @@ spec: - key type: object required: - - url + - url + type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + 
useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint type: object + deleted: + type: boolean from: type: string fromExpression: @@ -10559,6 +12670,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -10585,6 +12698,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -10668,6 +12783,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -10884,8 +13103,6 @@ spec: template: type: string type: object - required: - - template type: object type: object inline: {} @@ -10960,6 +13177,29 @@ spec: type: 
object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -10989,6 +13229,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -11015,6 +13283,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -11041,6 +13311,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -11052,7 +13324,237 @@ spec: required: - key type: object - usernameSecret: + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo + type: object + globalName: + type: string + hdfs: + properties: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + 
type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path + type: object + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + 
createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: properties: key: type: string @@ -11063,22 +13565,25 @@ spec: required: - key type: object + securityToken: + type: string required: - - repo + - key type: object - globalName: + path: type: string - hdfs: + raw: properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: + data: type: string - krbCCacheSecret: + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: properties: key: type: string @@ -11089,18 +13594,44 @@ spec: required: - key type: object - krbConfigConfigMap: + bucket: + type: string + createBucketIfNotPresent: properties: - key: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: type: string - name: + kmsKeyId: type: string - optional: - type: boolean - required: - - key + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object type: object - krbKeytabSecret: + endpoint: + type: string + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: properties: key: type: string @@ -11111,46 +13642,116 @@ spec: required: - key type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url + useSDKCreds: + type: boolean type: object - mode: - 
format: int32 - type: integer + subPath: + type: string + required: + - name + type: object + type: object + transformation: + items: + properties: + expression: + type: string + required: + - expression + type: object + type: array + required: + - source + - transformation + type: object + executor: + properties: + serviceAccountName: + type: string + type: object + failFast: + type: boolean + hostAliases: + items: + properties: + hostnames: + items: + type: string + type: array + ip: + type: string + type: object + type: array + http: + properties: + body: + type: string + bodyFrom: + properties: + bytes: + format: byte + type: string + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + valueFrom: + properties: + secretKeyRef: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + required: + - name + type: object + type: array + insecureSkipVerify: + type: boolean + method: + type: string + successCondition: + type: string + timeoutSeconds: + format: int64 + type: integer + url: + type: string + required: + - url + type: object + initContainers: + items: + properties: + args: + items: + type: string + type: array + command: + items: + type: string + type: array + env: + items: + properties: name: type: string - optional: - type: boolean - oss: + value: + type: string + valueFrom: properties: - accessKeySecret: + configMapKeyRef: properties: key: type: string @@ -11161,24 +13762,31 @@ spec: required: - key type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: + fieldRef: properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer + apiVersion: + type: string + fieldPath: + type: string + required: + - fieldPath + type: object + resourceFieldRef: + properties: + 
containerName: + type: string + divisor: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + type: string + required: + - resource type: object - secretKeySecret: + secretKeyRef: properties: key: type: string @@ -11189,137 +13797,745 @@ spec: required: - key type: object - securityToken: + type: object + required: + - name + type: object + type: array + envFrom: + items: + properties: + configMapRef: + properties: + name: type: string - required: - - key + optional: + type: boolean type: object - path: + prefix: type: string - raw: + secretRef: properties: - data: + name: type: string - required: - - data + optional: + type: boolean type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: + type: object + type: array + image: + type: string + imagePullPolicy: + type: string + lifecycle: + properties: + postStart: + properties: + exec: + properties: + command: + items: + type: string + type: array + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + type: object + preStop: + properties: + exec: + properties: + command: + items: type: string + type: array + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + path: 
+ type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + type: object + type: object + livenessProbe: + properties: + exec: + properties: + command: + items: + type: string + type: array + type: object + failureThreshold: + format: int32 + type: integer + grpc: + properties: + port: + format: int32 + type: integer + service: + type: string + required: + - port + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: name: type: string - optional: - type: boolean + value: + type: string required: - - key + - name + - value type: object - bucket: + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + initialDelaySeconds: + format: int32 + type: integer + periodSeconds: + format: int32 + type: integer + successThreshold: + format: int32 + type: integer + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + terminationGracePeriodSeconds: + format: int64 + type: integer + timeoutSeconds: + format: int32 + type: integer + type: object + mirrorVolumeMounts: + type: boolean + name: + type: string + ports: + items: + properties: + containerPort: + format: int32 + type: integer + hostIP: + type: string + hostPort: + format: int32 + type: integer + name: + type: string + protocol: + default: TCP + type: string + required: + - containerPort + type: object + type: array + x-kubernetes-list-map-keys: + - containerPort + - protocol + x-kubernetes-list-type: map + readinessProbe: + properties: + exec: + properties: 
+ command: + items: type: string - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: + type: array + type: object + failureThreshold: + format: int32 + type: integer + grpc: + properties: + port: + format: int32 + type: integer + service: + type: string + required: + - port + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: properties: - enableEncryption: - type: boolean - kmsEncryptionContext: + name: type: string - kmsKeyId: + value: type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object + required: + - name + - value type: object - endpoint: - type: string - insecure: - type: boolean - key: + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + initialDelaySeconds: + format: int32 + type: integer + periodSeconds: + format: int32 + type: integer + successThreshold: + format: int32 + type: integer + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + terminationGracePeriodSeconds: + format: int64 + type: integer + timeoutSeconds: + format: int32 + type: integer + type: object + resources: + properties: + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: object + type: 
object + securityContext: + properties: + allowPrivilegeEscalation: + type: boolean + capabilities: + properties: + add: + items: type: string - region: + type: array + drop: + items: type: string - roleARN: + type: array + type: object + privileged: + type: boolean + procMount: + type: string + readOnlyRootFilesystem: + type: boolean + runAsGroup: + format: int64 + type: integer + runAsNonRoot: + type: boolean + runAsUser: + format: int64 + type: integer + seLinuxOptions: + properties: + level: + type: string + role: + type: string + type: + type: string + user: + type: string + type: object + seccompProfile: + properties: + localhostProfile: + type: string + type: + type: string + required: + - type + type: object + windowsOptions: + properties: + gmsaCredentialSpec: + type: string + gmsaCredentialSpecName: + type: string + hostProcess: + type: boolean + runAsUserName: + type: string + type: object + type: object + startupProbe: + properties: + exec: + properties: + command: + items: type: string - secretKeySecret: + type: array + type: object + failureThreshold: + format: int32 + type: integer + grpc: + properties: + port: + format: int32 + type: integer + service: + type: string + required: + - port + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: properties: - key: - type: string name: type: string - optional: - type: boolean + value: + type: string required: - - key + - name + - value type: object - useSDKCreds: - type: boolean - type: object - subPath: + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + initialDelaySeconds: + format: int32 + type: integer + periodSeconds: + format: int32 + type: integer + successThreshold: + format: int32 + type: integer + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true 
+ required: + - port + type: object + terminationGracePeriodSeconds: + format: int64 + type: integer + timeoutSeconds: + format: int32 + type: integer + type: object + stdin: + type: boolean + stdinOnce: + type: boolean + terminationMessagePath: + type: string + terminationMessagePolicy: + type: string + tty: + type: boolean + volumeDevices: + items: + properties: + devicePath: + type: string + name: type: string required: + - devicePath - name type: object - type: object - transformation: - items: - properties: - expression: - type: string - required: - - expression - type: object - type: array - required: - - source - - transformation - type: object - executor: - properties: - serviceAccountName: - type: string - type: object - failFast: - type: boolean - hostAliases: - items: - properties: - hostnames: + type: array + volumeMounts: items: - type: string + properties: + mountPath: + type: string + mountPropagation: + type: string + name: + type: string + readOnly: + type: boolean + subPath: + type: string + subPathExpr: + type: string + required: + - mountPath + - name + type: object type: array - ip: + workingDir: type: string + required: + - name type: object type: array - http: + inputs: properties: - body: - type: string - headers: + artifacts: items: properties: - name: + archive: + properties: + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object + type: object + archiveLogs: + type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object + artifactory: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + 
required: + - key + type: object + url: + type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - url + type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: type: string - value: + fromExpression: type: string - valueFrom: + gcs: properties: - secretKeyRef: + bucket: + type: string + key: + type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key + type: object + git: + properties: + branch: + type: string + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: + items: + type: string + type: array + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: + type: string + revision: + type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo + type: object + globalName: + type: string + hdfs: + properties: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + 
krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: properties: key: type: string @@ -11330,515 +14546,774 @@ spec: required: - key type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path type: object - required: - - name - type: object - type: array - insecureSkipVerify: - type: boolean - method: - type: string - successCondition: - type: string - timeoutSeconds: - format: int64 - type: integer - url: - type: string - required: - - url - type: object - initContainers: - items: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + 
type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: properties: - key: - type: string name: type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: + value: type: string - optional: - type: boolean required: - - key + - name + - value type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: + accessKeySecret: properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: + key: type: string - 
port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + name: type: string + optional: + type: boolean required: - - port + - key type: object - tcpSocket: + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: properties: - host: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true + name: + type: string + optional: + type: boolean required: - - port + - key type: object + securityToken: + type: string + required: + - key type: object - preStop: + path: + type: string + raw: properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: + data: + type: string + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: + key: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + name: type: string + optional: + type: boolean required: - - port + - key type: object - tcpSocket: + bucket: + type: string + createBucketIfNotPresent: properties: - host: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object type: 
object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: + endpoint: type: string - required: - - port - type: object - httpGet: - properties: - host: + insecure: + type: boolean + key: type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: + region: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + roleARN: type: string - required: - - port + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + useSDKCreds: + type: boolean type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: + subPath: + type: string + required: + - name + type: object + type: array + parameters: + items: + properties: + default: + type: string + description: + type: string + enum: + items: + type: string + type: array + globalName: + type: string + name: + type: string + value: + type: string + valueFrom: properties: - host: + configMapKeyRef: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + default: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port + event: + type: string + expression: + type: string + jqFilter: + type: string + jsonPath: + type: string + parameter: + type: string + path: + type: string + supplied: + type: object type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: 
integer + required: + - name type: object - mirrorVolumeMounts: - type: boolean - name: - type: string - ports: - items: + type: array + type: object + memoize: + properties: + cache: + properties: + configMap: properties: - containerPort: - format: int32 - type: integer - hostIP: + key: type: string - hostPort: - format: int32 - type: integer name: type: string - protocol: - default: TCP - type: string + optional: + type: boolean required: - - containerPort + - key type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: + required: + - configMap + type: object + key: + type: string + maxAge: + type: string + required: + - cache + - key + - maxAge + type: object + metadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + metrics: + properties: + prometheus: + items: properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: + counter: properties: - port: - format: int32 - type: integer - service: + value: type: string required: - - port + - value type: object - httpGet: + gauge: properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + realtime: + type: boolean + value: type: string required: - - port + - realtime + - value type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: + help: + type: string + histogram: properties: - host: + buckets: + items: + type: number + type: array + value: type: string - port: - 
anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true + - buckets + - value type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: + labels: + items: + properties: + key: type: string - type: array - drop: - items: + value: type: string - type: array - type: object - privileged: - type: boolean - procMount: + required: + - key + - value + type: object + type: array + name: type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: + when: + type: string + required: + - help + - name + type: object + type: array + required: + - prometheus + type: object + name: + type: string + nodeSelector: + additionalProperties: + type: string + type: object + outputs: + properties: + artifacts: + items: + properties: + archive: + properties: + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object + type: object + archiveLogs: type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: + artifactGC: properties: - level: - type: string - role: - type: string - type: + podMetadata: + properties: + annotations: + additionalProperties: + type: 
string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: type: string - user: + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never type: string type: object - seccompProfile: + artifactory: properties: - localhostProfile: - type: string - type: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object required: - - type + - url type: object - windowsOptions: + azure: properties: - gmsaCredentialSpec: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: type: string - gmsaCredentialSpecName: + container: type: string - hostProcess: + endpoint: + type: string + useSDKCreds: type: boolean - runAsUserName: + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: + type: string + fromExpression: + type: string + gcs: + properties: + bucket: + type: string + key: type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key type: object - type: object - startupProbe: - properties: - exec: + git: properties: - command: + branch: + type: string + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: items: type: string type: array + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: + type: string + revision: + type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + 
type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo type: object - failureThreshold: - format: int32 - type: integer - grpc: + globalName: + type: string + hdfs: properties: - port: - format: int32 - type: integer - service: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: type: string required: - - port + - path type: object - httpGet: + http: properties: - host: - type: string - httpHeaders: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: 
object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: items: properties: name: @@ -11850,117 +15325,21 @@ spec: - value type: object type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: + url: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true required: - - port + - url type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: + mode: format: int32 type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - inputs: - properties: - artifacts: - items: - properties: - archive: - 
properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: + name: + type: string + optional: type: boolean - artifactory: + oss: properties: - passwordSecret: + accessKeySecret: properties: key: type: string @@ -11971,9 +15350,24 @@ spec: required: - key type: object - url: + bucket: type: string - usernameSecret: + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: properties: key: type: string @@ -11984,20 +15378,25 @@ spec: required: - key type: object + securityToken: + type: string required: - - url + - key type: object - from: - type: string - fromExpression: + path: type: string - gcs: + raw: properties: - bucket: - type: string - key: + data: type: string - serviceAccountKeySecret: + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: properties: key: type: string @@ -12008,23 +15407,44 @@ spec: required: - key type: object - required: - - key - type: object - git: - properties: - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: + bucket: + type: string + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + endpoint: + type: string + insecure: type: boolean - passwordSecret: + key: + type: string + region: + type: string + roleARN: + 
type: string + secretKeySecret: properties: key: type: string @@ -12035,11 +15455,37 @@ spec: required: - key type: object - repo: - type: string - revision: - type: string - sshPrivateKeySecret: + useSDKCreds: + type: boolean + type: object + subPath: + type: string + required: + - name + type: object + type: array + exitCode: + type: string + parameters: + items: + properties: + default: + type: string + description: + type: string + enum: + items: + type: string + type: array + globalName: + type: string + name: + type: string + value: + type: string + valueFrom: + properties: + configMapKeyRef: properties: key: type: string @@ -12050,800 +15496,549 @@ spec: required: - key type: object - usernameSecret: - properties: - key: + default: + type: string + event: + type: string + expression: + type: string + jqFilter: + type: string + jsonPath: + type: string + parameter: + type: string + path: + type: string + supplied: + type: object + type: object + required: + - name + type: object + type: array + result: + type: string + type: object + parallelism: + format: int64 + type: integer + plugin: + type: object + podSpecPatch: + type: string + priority: + format: int32 + type: integer + priorityClassName: + type: string + resource: + properties: + action: + type: string + failureCondition: + type: string + flags: + items: + type: string + type: array + manifest: + type: string + manifestFrom: + properties: + artifact: + properties: + archive: + properties: + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object + type: object + archiveLogs: + type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string 
+ type: object + artifactory: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: + type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - url + type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: + type: string + fromExpression: + type: string + gcs: + properties: + bucket: + type: string + key: + type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key + type: object + git: + properties: + branch: + type: string + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: + items: type: string - name: + type: array + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: + type: string + revision: + type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo + type: object + globalName: + type: string + hdfs: + properties: + addresses: + items: type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object 
- globalName: - type: string - hdfs: - properties: - addresses: - items: + type: array + force: + type: boolean + hdfsUser: type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - headers: - items: + krbCCacheSecret: properties: + key: + type: string name: type: string - value: + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: type: string + optional: + type: boolean required: - - name - - value + - key type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - required: - - key - type: object - path: - type: string - raw: - properties: - data: 
- type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path + type: object + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + 
type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: properties: - key: - type: string name: type: string - optional: - type: boolean + value: + type: string required: - - key + - name + - value type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - memoize: - properties: - cache: - properties: - configMap: - properties: - key: - type: string + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer name: type: string optional: type: boolean - required: - - key - type: object - required: - - configMap - type: object - key: - type: string - maxAge: - type: string - required: - - cache - - key - - maxAge - type: object - metadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - 
type: string - type: object - type: object - metrics: - properties: - prometheus: - items: - properties: - counter: - properties: - value: - type: string - required: - - value - type: object - gauge: - properties: - realtime: - type: boolean - value: - type: string - required: - - realtime - - value - type: object - help: - type: string - histogram: - properties: - buckets: - items: - type: number - type: array - value: - type: string - required: - - buckets - - value - type: object - labels: - items: + oss: properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string key: type: string - value: + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: type: string required: - key - - value type: object - type: array - name: - type: string - when: - type: string - required: - - help - - name - type: object - type: array - required: - - prometheus - type: object - name: - type: string - nodeSelector: - additionalProperties: - type: string - type: object - outputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - 
key - type: object - required: - - url - type: object - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: + path: + type: string + raw: + properties: + data: type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - 
krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - headers: - items: + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + endpoint: + type: string + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: properties: - name: + key: type: string - value: + name: type: string + optional: + type: boolean required: - - name - - value + - key type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - properties: 
- objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - exitCode: - type: string - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: + useSDKCreds: + type: boolean + type: object + subPath: type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - result: - type: string - type: object - parallelism: - format: int64 - type: integer - plugin: - type: object - podSpecPatch: - type: string - priority: - format: int32 - type: integer - priorityClassName: - type: string - resource: - properties: - action: - type: string - failureCondition: - type: string - flags: - items: - type: string - type: array - manifest: - type: string + required: + - name + type: object + required: + - artifact + type: object mergeStrategy: type: string setOwnerReference: diff 
--git a/manifests/base/crds/full/argoproj.io_workflowartifactgctasks.yaml b/manifests/base/crds/full/argoproj.io_workflowartifactgctasks.yaml new file mode 100644 index 000000000000..74827de1693e --- /dev/null +++ b/manifests/base/crds/full/argoproj.io_workflowartifactgctasks.yaml @@ -0,0 +1,986 @@ +# This is an auto-generated file. DO NOT EDIT +apiVersion: apiextensions.k8s.io/v1 +kind: CustomResourceDefinition +metadata: + name: workflowartifactgctasks.argoproj.io +spec: + group: argoproj.io + names: + kind: WorkflowArtifactGCTask + listKind: WorkflowArtifactGCTaskList + plural: workflowartifactgctasks + shortNames: + - wfat + singular: workflowartifactgctask + scope: Namespaced + versions: + - name: v1alpha1 + schema: + openAPIV3Schema: + properties: + apiVersion: + type: string + kind: + type: string + metadata: + type: object + spec: + properties: + artifactsByNode: + additionalProperties: + properties: + archiveLocation: + properties: + archiveLogs: + type: boolean + artifactory: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: + type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - url + type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + gcs: + properties: + bucket: + type: string + key: + type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key + type: object + git: + properties: + branch: + type: string + depth: + 
format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: + items: + type: string + type: array + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: + type: string + revision: + type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo + type: object + hdfs: + properties: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path + type: object + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + 
name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: + - key + type: object + raw: + properties: + data: + type: string + required: + - data + type: object + s3: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + 
properties: + enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + endpoint: + type: string + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + useSDKCreds: + type: boolean + type: object + type: object + artifacts: + additionalProperties: + properties: + archive: + properties: + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object + type: object + archiveLogs: + type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object + artifactory: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: + type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - url + type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: + type: string + fromExpression: + 
type: string + gcs: + properties: + bucket: + type: string + key: + type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key + type: object + git: + properties: + branch: + type: string + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: + items: + type: string + type: array + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: + type: string + revision: + type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo + type: object + globalName: + type: string + hdfs: + properties: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path + type: object + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + 
properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: 
object + securityToken: + type: string + required: + - key + type: object + path: + type: string + raw: + properties: + data: + type: string + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + endpoint: + type: string + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + useSDKCreds: + type: boolean + type: object + subPath: + type: string + required: + - name + type: object + type: object + type: object + type: object + type: object + status: + properties: + artifactResultsByNode: + additionalProperties: + properties: + artifactResults: + additionalProperties: + properties: + error: + type: string + name: + type: string + success: + type: boolean + required: + - name + type: object + type: object + type: object + type: object + type: object + required: + - metadata + - spec + type: object + served: true + storage: true + subresources: + status: {} diff --git a/manifests/base/crds/full/argoproj.io_workfloweventbindings.yaml b/manifests/base/crds/full/argoproj.io_workfloweventbindings.yaml index 90e24464c06a..8e18ab681dec 100644 --- a/manifests/base/crds/full/argoproj.io_workfloweventbindings.yaml +++ b/manifests/base/crds/full/argoproj.io_workfloweventbindings.yaml @@ -55,6 +55,29 @@ spec: 
type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -84,6 +107,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -110,6 +161,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -136,6 +189,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -219,6 +274,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + 
required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: diff --git a/manifests/base/crds/full/argoproj.io_workflows.yaml b/manifests/base/crds/full/argoproj.io_workflows.yaml index 87770a465076..cecd175d765d 100644 --- a/manifests/base/crds/full/argoproj.io_workflows.yaml +++ b/manifests/base/crds/full/argoproj.io_workflows.yaml @@ -24,6 +24,11 @@ spec: jsonPath: .status.startedAt name: Age type: date + - description: Human readable message indicating details about why the workflow + is in this condition. 
+ jsonPath: .status.message + name: Message + type: string name: v1alpha1 schema: openAPIV3Schema: @@ -415,6 +420,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -444,6 +472,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -470,6 +526,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -496,6 +554,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -579,6 +639,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + 
oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -782,6 +946,29 @@ spec: type: object type: array type: object + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactRepositoryRef: properties: configMap: @@ -843,6 +1030,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -872,6 +1082,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - 
endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -898,6 +1136,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -924,6 +1164,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -1007,6 +1249,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -1223,8 +1569,6 @@ spec: template: type: string type: object - required: - - template type: object type: object hostAliases: @@ -1918,6 +2262,32 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: 
+ key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object gcs: properties: bucket: @@ -1940,6 +2310,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -1966,6 +2338,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -2047,6 +2421,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -3428,6 +3906,29 
@@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -3457,6 +3958,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -3483,6 +4012,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -3509,6 +4040,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -3592,6 +4125,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: 
+ type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -3831,6 +4468,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -3860,6 +4520,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -3886,6 +4574,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -3912,6 +4602,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -3995,6 +4687,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: 
object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -4211,8 +5007,6 @@ spec: template: type: string type: object - required: - - template type: object type: object inline: {} @@ -4287,6 +5081,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -4316,6 +5133,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + 
blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -4342,6 +5187,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -4368,6 +5215,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -4451,6 +5300,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -4637,6 +5590,12 @@ spec: properties: body: type: string + bodyFrom: + properties: + bytes: 
+ format: byte + type: string + type: object headers: items: properties: @@ -5285,6 +6244,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -5314,6 +6296,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -5340,6 +6350,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -5366,6 +6378,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -5449,6 +6463,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + 
properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -5776,6 +6894,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -5805,6 +6946,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -5831,6 +7000,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -5857,6 +7028,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -5940,6 +7113,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + 
key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -6171,280 +7448,775 @@ spec: type: array manifest: type: string - mergeStrategy: - type: string - setOwnerReference: - type: boolean - successCondition: - type: string - required: - - action - type: object - retryStrategy: - properties: - affinity: - properties: - nodeAntiAffinity: - type: object - type: object - backoff: - properties: - duration: - type: string - factor: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - maxDuration: - type: string - type: object - expression: - type: string - limit: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - retryPolicy: - type: string - type: object - schedulerName: - type: string - script: - properties: - 
args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: + manifestFrom: properties: - postStart: + artifact: properties: - exec: + archive: properties: - command: - items: - type: string - type: array + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object type: object - httpGet: + archiveLogs: + type: boolean + artifactGC: properties: - host: - type: string - httpHeaders: - items: - properties: - name: + podMetadata: + properties: + annotations: + additionalProperties: type: string - value: + type: object + labels: + additionalProperties: type: string - required: - - name - - value - type: object - type: 
array - path: + type: object + type: object + serviceAccountName: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never type: string - required: - - port type: object - tcpSocket: + artifactory: properties: - host: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object required: - - port + - url type: object - type: object - preStop: - properties: - exec: + azure: properties: - command: - items: - type: string - type: array + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint type: object - httpGet: + deleted: + type: boolean + from: + type: string + fromExpression: + type: string + gcs: properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: + bucket: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + key: type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object required: - - port + - key type: object - tcpSocket: + git: properties: - host: + branch: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true + depth: + format: 
int64 + type: integer + disableSubmodules: + type: boolean + fetch: + items: + type: string + type: array + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: + type: string + revision: + type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object required: - - port + - repo type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: + globalName: type: string - httpHeaders: - items: - properties: - name: - type: string - value: + hdfs: + properties: + addresses: + items: type: string - required: - - name - - value + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path + type: object + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + 
type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + 
properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: + - key + type: object + path: + type: string + raw: + properties: + data: + type: string + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + endpoint: + type: string + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + useSDKCreds: + type: boolean + type: object + subPath: + type: string + required: + - name + type: object + required: + - artifact + type: object + mergeStrategy: + type: string + setOwnerReference: + type: boolean + successCondition: + type: string + required: + - action + type: object + retryStrategy: + properties: + affinity: + properties: + nodeAntiAffinity: + type: object + type: object + backoff: + properties: + duration: + type: string + factor: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + maxDuration: + type: string + type: object + expression: + type: string + limit: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + retryPolicy: + type: string + type: object + schedulerName: + type: string + script: + properties: + args: + items: + type: 
string + type: array + command: + items: + type: string + type: array + env: + items: + properties: + name: + type: string + value: + type: string + valueFrom: + properties: + configMapKeyRef: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + fieldRef: + properties: + apiVersion: + type: string + fieldPath: + type: string + required: + - fieldPath + type: object + resourceFieldRef: + properties: + containerName: + type: string + divisor: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + type: string + required: + - resource + type: object + secretKeyRef: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + required: + - name + type: object + type: array + envFrom: + items: + properties: + configMapRef: + properties: + name: + type: string + optional: + type: boolean + type: object + prefix: + type: string + secretRef: + properties: + name: + type: string + optional: + type: boolean + type: object + type: object + type: array + image: + type: string + imagePullPolicy: + type: string + lifecycle: + properties: + postStart: + properties: + exec: + properties: + command: + items: + type: string + type: array + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: 
object + type: object + preStop: + properties: + exec: + properties: + command: + items: + type: string + type: array + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + type: object + type: object + livenessProbe: + properties: + exec: + properties: + command: + items: + type: string + type: array + type: object + failureThreshold: + format: int32 + type: integer + grpc: + properties: + port: + format: int32 + type: integer + service: + type: string + required: + - port + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value type: object type: array path: @@ -8579,6 +10351,32 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object gcs: properties: bucket: @@ -8601,6 +10399,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -8627,6 +10427,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -8708,6 +10510,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + 
properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -10089,6 +11995,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -10118,6 +12047,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + 
type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -10144,6 +12101,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -10170,6 +12129,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -10253,6 +12214,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -10492,6 +12557,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + 
properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -10521,6 +12609,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -10547,6 +12663,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -10573,6 +12691,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -10656,6 +12776,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + 
clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -10872,8 +13096,6 @@ spec: template: type: string type: object - required: - - template type: object type: object inline: {} @@ -10948,6 +13170,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -10977,6 +13222,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -11003,6 +13276,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -11029,6 +13304,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -11112,6 +13389,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: 
string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -11298,6 +13679,12 @@ spec: properties: body: type: string + bodyFrom: + properties: + bytes: + format: byte + type: string + type: object headers: items: properties: @@ -11946,6 +14333,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -11975,6 +14385,34 @@ spec: required: - url type: object + azure: + properties: + 
accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -12001,6 +14439,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -12027,6 +14467,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -12110,6 +14552,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + 
type: object + type: object headers: items: properties: @@ -12437,6 +14983,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -12466,6 +15035,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -12492,6 +15089,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -12518,6 +15117,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -12601,6 +15202,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + 
clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -12832,6 +15537,501 @@ spec: type: array manifest: type: string + manifestFrom: + properties: + artifact: + properties: + archive: + properties: + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object + type: object + archiveLogs: + type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object + artifactory: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: + type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - url + type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: 
+ - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: + type: string + fromExpression: + type: string + gcs: + properties: + bucket: + type: string + key: + type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key + type: object + git: + properties: + branch: + type: string + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: + items: + type: string + type: array + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: + type: string + revision: + type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo + type: object + globalName: + type: string + hdfs: + properties: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path + type: object + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: 
string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + 
properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: + - key + type: object + path: + type: string + raw: + properties: + data: + type: string + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + endpoint: + type: string + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + useSDKCreds: + type: boolean + type: object + subPath: + type: string + required: + - name + type: object + required: + - artifact + type: object mergeStrategy: type: string setOwnerReference: @@ -15728,6 +18928,19 @@ spec: type: object status: properties: + artifactGCStatus: + properties: + notSpecified: + type: boolean + podsRecouped: + additionalProperties: + type: boolean + type: object + strategiesProcessed: + additionalProperties: + type: boolean + type: object + type: object artifactRepositoryRef: properties: artifactRepository: @@ -15761,6 +18974,31 @@ spec: - key type: object type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blobNameFormat: + type: string + container: + type: 
string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - container + - endpoint + type: object gcs: properties: bucket: @@ -16013,6 +19251,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -16042,6 +19303,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -16068,6 +19357,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -16094,6 +19385,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -16177,6 +19470,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + 
type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -16421,6 +19818,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -16450,6 +19870,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -16476,6 +19924,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -16502,6 +19952,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -16585,6 +20037,110 @@ spec: type: object http: 
properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -16854,6 +20410,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -16883,6 +20462,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: 
string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -16909,6 +20516,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -16935,6 +20544,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -17018,6 +20629,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -18312,6 
+22027,32 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object gcs: properties: bucket: @@ -18334,6 +22075,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -18360,6 +22103,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -18441,6 +22186,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + 
required: + - key + type: object + type: object + type: object headers: items: properties: @@ -19822,6 +23671,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -19851,6 +23723,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -19877,6 +23777,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -19903,6 +23805,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -19986,6 +23890,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + 
type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -20225,6 +24233,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -20254,6 +24285,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -20280,6 +24339,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -20306,6 +24367,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -20389,6 +24452,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: 
+ passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -20605,8 +24772,6 @@ spec: template: type: string type: object - required: - - template type: object type: object inline: {} @@ -20681,6 +24846,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -20710,6 +24898,34 @@ spec: required: - url type: object + azure: + properties: + 
accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -20736,6 +24952,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -20762,6 +24980,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -20845,6 +25065,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + 
type: object + type: object headers: items: properties: @@ -21031,6 +25355,12 @@ spec: properties: body: type: string + bodyFrom: + properties: + bytes: + format: byte + type: string + type: object headers: items: properties: @@ -21679,6 +26009,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -21708,6 +26061,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -21734,6 +26115,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -21760,6 +26143,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -21843,6 +26228,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + 
clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -22170,6 +26659,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -22199,6 +26711,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -22225,6 +26765,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -22247,11 +26789,331 @@ spec: required: - key type: object - repo: + repo: + type: string + revision: + 
type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo + type: object + globalName: + type: string + hdfs: + properties: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path + type: object + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + 
key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: + - key + type: object + path: + type: string + raw: + properties: + data: + type: string + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: type: string - revision: + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + 
properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + endpoint: type: string - sshPrivateKeySecret: + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: properties: key: type: string @@ -22262,7 +27124,37 @@ spec: required: - key type: object - usernameSecret: + useSDKCreds: + type: boolean + type: object + subPath: + type: string + required: + - name + type: object + type: array + exitCode: + type: string + parameters: + items: + properties: + default: + type: string + description: + type: string + enum: + items: + type: string + type: array + globalName: + type: string + name: + type: string + value: + type: string + valueFrom: + properties: + configMapKeyRef: properties: key: type: string @@ -22273,298 +27165,549 @@ spec: required: - key type: object - required: - - repo + default: + type: string + event: + type: string + expression: + type: string + jqFilter: + type: string + jsonPath: + type: string + parameter: + type: string + path: + type: string + supplied: + type: object type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: + required: + - name + type: object + type: array + result: + type: string + type: object + parallelism: + format: int64 + type: integer + plugin: + type: object + podSpecPatch: + type: string + priority: + format: int32 + type: integer + priorityClassName: + type: string + resource: + properties: + action: + type: string + failureCondition: + type: string + flags: + items: + type: string + type: array + manifest: + type: string + manifestFrom: + properties: + artifact: + properties: + archive: + properties: + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object + type: object + archiveLogs: + type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + 
additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object + artifactory: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: + type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - url + type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: + type: string + fromExpression: + type: string + gcs: + properties: + bucket: + type: string + key: + type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key + type: object + git: + properties: + branch: + type: string + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: + items: + type: string + type: array + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: + type: string + revision: + type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: 
boolean + required: + - key + type: object + required: + - repo + type: object + globalName: + type: string + hdfs: + properties: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path + type: object + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: 
array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - headers: - items: + required: + - key + type: object + path: + type: string + raw: + properties: + data: + type: string + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: properties: + 
key: + type: string name: type: string - value: + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + endpoint: + type: string + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: + properties: + key: + type: string + name: type: string + optional: + type: boolean required: - - name - - value + - key type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - properties: - 
objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - exitCode: - type: string - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: + useSDKCreds: + type: boolean + type: object + subPath: type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - result: - type: string - type: object - parallelism: - format: int64 - type: integer - plugin: - type: object - podSpecPatch: - type: string - priority: - format: int32 - type: integer - priorityClassName: - type: string - resource: - properties: - action: - type: string - failureCondition: - type: string - flags: - items: - type: string - type: array - manifest: - type: string + required: + - name + type: object + required: + - artifact + type: object mergeStrategy: type: string setOwnerReference: @@ 
-24960,6 +30103,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -24989,6 +30155,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -25015,6 +30209,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -25041,6 +30237,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -25124,6 +30322,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: 
+ type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -25327,6 +30629,29 @@ spec: type: object type: array type: object + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactRepositoryRef: properties: configMap: @@ -25388,6 +30713,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -25417,6 +30765,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string 
fromExpression: @@ -25443,6 +30819,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -25469,6 +30847,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -25552,6 +30932,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -25768,8 +31252,6 @@ spec: template: type: string type: object - required: - - template type: object type: object hostAliases: @@ -26463,6 +31945,32 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: 
+ type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object gcs: properties: bucket: @@ -26485,6 +31993,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -26511,6 +32021,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -26592,6 +32104,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -27973,6 +33589,29 @@ spec: type: object archiveLogs: type: boolean + 
artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -28002,6 +33641,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -28028,6 +33695,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -28054,6 +33723,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -28137,6 +33808,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + 
type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -28376,6 +34151,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -28405,6 +34203,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -28431,6 +34257,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -28457,6 +34285,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -28540,6 +34370,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + 
properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -28756,8 +34690,6 @@ spec: template: type: string type: object - required: - - template type: object type: object inline: {} @@ -28832,6 +34764,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -28861,6 +34816,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + 
container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -28887,6 +34870,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -28913,6 +34898,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -28996,6 +34983,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -29182,6 +35273,12 @@ spec: properties: body: type: string + bodyFrom: + properties: + bytes: + format: byte 
+ type: string + type: object headers: items: properties: @@ -29830,6 +35927,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -29859,6 +35979,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -29885,6 +36033,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -29911,6 +36061,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -29994,6 +36146,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: 
+ clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -30321,6 +36577,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -30350,6 +36629,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -30376,6 +36683,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -30402,6 +36711,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -30485,6 +36796,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + 
key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -30716,6 +37131,501 @@ spec: type: array manifest: type: string + manifestFrom: + properties: + artifact: + properties: + archive: + properties: + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object + type: object + archiveLogs: + type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object + artifactory: + properties: + 
passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: + type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - url + type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: + type: string + fromExpression: + type: string + gcs: + properties: + bucket: + type: string + key: + type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key + type: object + git: + properties: + branch: + type: string + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: + items: + type: string + type: array + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: + type: string + revision: + type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo + type: object + globalName: + type: string + hdfs: + properties: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + 
optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path + type: object + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array 
+ url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: + - key + type: object + path: + type: string + raw: + properties: + data: + type: string + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + endpoint: + type: string + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + useSDKCreds: + type: boolean + type: object + subPath: + type: string + required: + - name + type: object + required: + - artifact + type: object mergeStrategy: type: string setOwnerReference: @@ -33124,6 
+40034,32 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object gcs: properties: bucket: @@ -33146,6 +40082,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -33172,6 +40110,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -33253,6 +40193,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + 
required: + - key + type: object + type: object + type: object headers: items: properties: @@ -34634,6 +41678,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -34663,6 +41730,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -34689,6 +41784,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -34715,6 +41812,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -34798,6 +41897,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + 
type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -35037,6 +42240,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -35066,6 +42292,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -35092,6 +42346,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -35118,6 +42374,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -35201,6 +42459,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: 
+ passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -35417,8 +42779,6 @@ spec: template: type: string type: object - required: - - template type: object type: object inline: {} @@ -35493,6 +42853,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -35522,6 +42905,34 @@ spec: required: - url type: object + azure: + properties: + 
accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -35548,6 +42959,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -35574,6 +42987,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -35657,6 +43072,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + 
type: object + type: object headers: items: properties: @@ -35843,6 +43362,12 @@ spec: properties: body: type: string + bodyFrom: + properties: + bytes: + format: byte + type: string + type: object headers: items: properties: @@ -36491,6 +44016,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -36520,6 +44068,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -36546,6 +44122,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -36572,6 +44150,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -36655,6 +44235,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + 
clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -36982,6 +44666,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -37011,6 +44718,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -37037,6 +44772,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -37063,6 +44800,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean 
sshPrivateKeySecret: properties: key: @@ -37133,19 +44872,123 @@ spec: required: - key type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path + type: object + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -37377,6 +45220,501 @@ spec: type: array manifest: type: string + manifestFrom: + properties: + artifact: + properties: + archive: + properties: + none: + type: object + tar: + 
properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object + type: object + archiveLogs: + type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object + artifactory: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: + type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - url + type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: + type: string + fromExpression: + type: string + gcs: + properties: + bucket: + type: string + key: + type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key + type: object + git: + properties: + branch: + type: string + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: + items: + type: string + type: array + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: + type: string + revision: + type: string + singleBranch: + type: boolean + sshPrivateKeySecret: 
+ properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo + type: object + globalName: + type: string + hdfs: + properties: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path + type: object + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: 
boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: + - key + type: object + path: + type: string + raw: + properties: + data: + type: string + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: 
boolean + required: + - key + type: object + type: object + endpoint: + type: string + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + useSDKCreds: + type: boolean + type: object + subPath: + type: string + required: + - name + type: object + required: + - artifact + type: object mergeStrategy: type: string setOwnerReference: diff --git a/manifests/base/crds/full/argoproj.io_workflowtaskresults.yaml b/manifests/base/crds/full/argoproj.io_workflowtaskresults.yaml index 4b3409e40930..7303c9278559 100644 --- a/manifests/base/crds/full/argoproj.io_workflowtaskresults.yaml +++ b/manifests/base/crds/full/argoproj.io_workflowtaskresults.yaml @@ -44,6 +44,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -73,6 +96,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -99,6 +150,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -125,6 +178,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: 
@@ -208,6 +263,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: diff --git a/manifests/base/crds/full/argoproj.io_workflowtasksets.yaml b/manifests/base/crds/full/argoproj.io_workflowtasksets.yaml index cdb321b1a600..5dd6cc8c85d4 100644 --- a/manifests/base/crds/full/argoproj.io_workflowtasksets.yaml +++ b/manifests/base/crds/full/argoproj.io_workflowtasksets.yaml @@ -421,6 +421,32 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + 
endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object gcs: properties: bucket: @@ -443,6 +469,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -469,6 +497,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -550,6 +580,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -1931,6 +2065,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: 
object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -1960,6 +2117,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -1986,6 +2171,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -2012,6 +2199,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -2095,6 +2284,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + 
required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -2334,6 +2627,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -2363,6 +2679,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -2389,6 +2733,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -2415,6 +2761,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -2498,6 +2846,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + 
clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -2714,8 +3166,6 @@ spec: template: type: string type: object - required: - - template type: object type: object inline: {} @@ -2790,6 +3240,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -2819,6 +3292,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: 
object + deleted: + type: boolean from: type: string fromExpression: @@ -2845,6 +3346,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -2871,6 +3374,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -2954,6 +3459,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -3140,6 +3749,12 @@ spec: properties: body: type: string + bodyFrom: + properties: + bytes: + format: byte + type: string + type: object headers: items: properties: @@ -3788,6 +4403,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + 
properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -3817,6 +4455,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -3843,6 +4509,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -3869,6 +4537,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -3952,6 +4622,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + 
clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -4279,6 +5053,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -4308,7 +5105,35 @@ spec: required: - url type: object - from: + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: type: string fromExpression: type: string @@ -4334,6 +5159,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -4360,6 +5187,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -4443,6 +5272,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + 
properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -4606,74 +5539,569 @@ spec: enum: items: type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - result: - type: string - type: object - parallelism: - format: int64 - type: integer - plugin: - type: object - podSpecPatch: - type: string - priority: - format: int32 - type: integer - priorityClassName: - type: string - resource: - properties: - 
action: - type: string - failureCondition: - type: string - flags: - items: - type: string - type: array - manifest: - type: string + type: array + globalName: + type: string + name: + type: string + value: + type: string + valueFrom: + properties: + configMapKeyRef: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + default: + type: string + event: + type: string + expression: + type: string + jqFilter: + type: string + jsonPath: + type: string + parameter: + type: string + path: + type: string + supplied: + type: object + type: object + required: + - name + type: object + type: array + result: + type: string + type: object + parallelism: + format: int64 + type: integer + plugin: + type: object + podSpecPatch: + type: string + priority: + format: int32 + type: integer + priorityClassName: + type: string + resource: + properties: + action: + type: string + failureCondition: + type: string + flags: + items: + type: string + type: array + manifest: + type: string + manifestFrom: + properties: + artifact: + properties: + archive: + properties: + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object + type: object + archiveLogs: + type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object + artifactory: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: + type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + 
required: + - url + type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: + type: string + fromExpression: + type: string + gcs: + properties: + bucket: + type: string + key: + type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key + type: object + git: + properties: + branch: + type: string + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: + items: + type: string + type: array + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: + type: string + revision: + type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo + type: object + globalName: + type: string + hdfs: + properties: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: 
boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path + type: object + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: 
object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: + - key + type: object + path: + type: string + raw: + properties: + data: + type: string + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + endpoint: + type: string + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + useSDKCreds: + type: boolean + type: object + subPath: + type: string + required: + - name + type: object + required: + - artifact + type: object mergeStrategy: type: string setOwnerReference: @@ -6716,6 +8144,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + 
strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -6745,6 +8196,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -6771,6 +8250,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -6797,6 +8278,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -6880,6 +8363,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + 
required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: diff --git a/manifests/base/crds/full/argoproj.io_workflowtemplates.yaml b/manifests/base/crds/full/argoproj.io_workflowtemplates.yaml index 5d2096e812e1..0f794456fdca 100644 --- a/manifests/base/crds/full/argoproj.io_workflowtemplates.yaml +++ b/manifests/base/crds/full/argoproj.io_workflowtemplates.yaml @@ -405,6 +405,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -434,6 +457,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -460,6 +511,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -486,6 +539,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -569,6 +624,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - 
key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -772,6 +931,29 @@ spec: type: object type: array type: object + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactRepositoryRef: properties: configMap: @@ -833,6 +1015,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - 
OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -862,6 +1067,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -888,6 +1121,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -914,6 +1149,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -997,85 +1234,189 @@ spec: type: object http: properties: - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: + auth: properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object + basicAuth: + properties: + passwordSecret: + 
properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object 
+ secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: + - key + type: object + path: + type: string + raw: + properties: + data: + type: string + required: + - data + type: object recurseMode: type: boolean s3: @@ -1213,8 +1554,6 @@ spec: template: type: string type: object - required: - - template type: object type: object hostAliases: @@ -1908,6 +2247,32 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object gcs: properties: bucket: @@ -1930,6 +2295,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -1956,6 +2323,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -2037,70 +2406,174 @@ spec: type: object http: properties: - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: + auth: properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - 
securityToken: - type: string - required: - - key - type: object - raw: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + 
type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: + - key + type: object + raw: properties: data: type: string @@ -3418,6 +3891,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -3447,6 +3943,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -3473,6 +3997,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -3499,6 +4025,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -3582,74 +4110,178 @@ spec: type: object http: properties: - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - 
createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: + auth: properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - required: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + 
name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: - key type: object path: @@ -3821,6 +4453,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -3850,6 +4505,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -3876,6 +4559,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -3902,6 +4587,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -3985,6 +4672,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + 
passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -4201,8 +4992,6 @@ spec: template: type: string type: object - required: - - template type: object type: object inline: {} @@ -4277,6 +5066,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -4306,6 +5118,34 @@ spec: required: - url type: object + azure: + properties: + 
accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -4332,6 +5172,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -4358,6 +5200,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -4441,26 +5285,130 @@ spec: type: object http: properties: - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + 
required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer name: type: string optional: @@ -4627,6 +5575,12 @@ spec: properties: body: type: string + bodyFrom: + properties: + bytes: + format: byte + type: string + type: object headers: items: properties: @@ -5275,6 +6229,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -5304,6 +6281,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -5330,6 +6335,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -5356,6 +6363,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -5439,6 +6448,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + 
passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -5766,6 +6879,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -5795,9 +6931,37 @@ spec: required: - url type: object - from: - type: string - fromExpression: + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: 
boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: + type: string + fromExpression: type: string gcs: properties: @@ -5821,6 +6985,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -5847,6 +7013,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -5930,6 +7098,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -6161,247 
+7433,742 @@ spec: type: array manifest: type: string - mergeStrategy: - type: string - setOwnerReference: - type: boolean - successCondition: - type: string - required: - - action - type: object - retryStrategy: - properties: - affinity: - properties: - nodeAntiAffinity: - type: object - type: object - backoff: - properties: - duration: - type: string - factor: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - maxDuration: - type: string - type: object - expression: - type: string - limit: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - retryPolicy: - type: string - type: object - schedulerName: - type: string - script: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array 
- image: - type: string - imagePullPolicy: - type: string - lifecycle: + manifestFrom: properties: - postStart: + artifact: properties: - exec: + archive: properties: - command: - items: - type: string - type: array + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object type: object - httpGet: + archiveLogs: + type: boolean + artifactGC: properties: - host: - type: string - httpHeaders: - items: - properties: - name: + podMetadata: + properties: + annotations: + additionalProperties: type: string - value: + type: object + labels: + additionalProperties: type: string - required: - - name - - value - type: object - type: array - path: + type: object + type: object + serviceAccountName: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object + artifactory: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object required: - - port + - url type: object - tcpSocket: + azure: properties: - host: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean required: - - port + - blob + - container + - endpoint type: object - type: object - preStop: - properties: - exec: + deleted: + type: boolean + from: + type: string + fromExpression: + type: string + gcs: properties: - command: - items: - type: string - 
type: array + bucket: + type: string + key: + type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key type: object - httpGet: + git: properties: - host: + branch: type: string - httpHeaders: + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object + type: string type: array - path: + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + revision: type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object required: - - port + - repo type: object - tcpSocket: + globalName: + type: string + hdfs: properties: - host: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: type: string - port: - anyOf: - - type: integer - - type: 
string - x-kubernetes-int-or-string: true required: - - port + - path type: object - type: object - type: object - livenessProbe: - properties: - exec: + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + 
createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + required: + - key + type: object + path: + type: string + raw: + properties: + data: + type: string + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + endpoint: + type: string + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + useSDKCreds: + type: boolean + type: object + subPath: + type: string + required: + - name + type: object + required: + - artifact + type: object + mergeStrategy: + type: string + setOwnerReference: + type: boolean + successCondition: + type: string + required: + - action + type: object + retryStrategy: + properties: + affinity: + properties: + nodeAntiAffinity: + type: object + type: object + backoff: + properties: + duration: + type: string + factor: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + 
maxDuration: + type: string + type: object + expression: + type: string + limit: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + retryPolicy: + type: string + type: object + schedulerName: + type: string + script: + properties: + args: + items: + type: string + type: array + command: + items: + type: string + type: array + env: + items: + properties: + name: + type: string + value: + type: string + valueFrom: + properties: + configMapKeyRef: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + fieldRef: + properties: + apiVersion: + type: string + fieldPath: + type: string + required: + - fieldPath + type: object + resourceFieldRef: + properties: + containerName: + type: string + divisor: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + type: string + required: + - resource + type: object + secretKeyRef: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + required: + - name + type: object + type: array + envFrom: + items: + properties: + configMapRef: + properties: + name: + type: string + optional: + type: boolean + type: object + prefix: + type: string + secretRef: + properties: + name: + type: string + optional: + type: boolean + type: object + type: object + type: array + image: + type: string + imagePullPolicy: + type: string + lifecycle: + properties: + postStart: + properties: + exec: + properties: + command: + items: + type: string + type: array + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + path: + type: string + port: + anyOf: + - type: 
integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + type: object + preStop: + properties: + exec: + properties: + command: + items: + type: string + type: array + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + type: object + type: object + livenessProbe: + properties: + exec: properties: command: items: @@ -8569,6 +10336,32 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object gcs: properties: bucket: @@ -8591,6 +10384,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -8617,6 +10412,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -8698,6 +10495,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + 
properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -10079,6 +11980,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -10108,6 +12032,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - 
endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -10134,6 +12086,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -10160,6 +12114,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -10243,6 +12199,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -10482,6 +12542,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + 
type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -10509,8 +12592,36 @@ spec: - key type: object required: - - url + - url + type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint type: object + deleted: + type: boolean from: type: string fromExpression: @@ -10537,6 +12648,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -10563,6 +12676,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -10646,6 +12761,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + 
- key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -10862,8 +13081,6 @@ spec: template: type: string type: object - required: - - template type: object type: object inline: {} @@ -10938,6 +13155,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -10967,6 +13207,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -10993,6 +13261,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -11019,6 +13289,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -11030,7 +13302,237 @@ spec: required: - key type: object - usernameSecret: + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo + type: object + globalName: + type: string 
+ hdfs: + properties: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path + type: object + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + 
required: + - key + type: object + type: object + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: properties: key: type: string @@ -11041,22 +13543,25 @@ spec: required: - key type: object + securityToken: + type: string required: - - repo + - key type: object - globalName: + path: type: string - hdfs: + raw: properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: + data: type: string - krbCCacheSecret: + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: properties: key: type: string @@ -11067,18 +13572,44 @@ spec: required: - key type: object - krbConfigConfigMap: + bucket: + type: string + createBucketIfNotPresent: properties: - key: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: type: string - name: + kmsKeyId: type: string - optional: - type: boolean - required: - - key + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object type: object - krbKeytabSecret: + endpoint: + type: string + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + 
type: string + secretKeySecret: properties: key: type: string @@ -11089,46 +13620,116 @@ spec: required: - key type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url + useSDKCreds: + type: boolean type: object - mode: - format: int32 - type: integer + subPath: + type: string + required: + - name + type: object + type: object + transformation: + items: + properties: + expression: + type: string + required: + - expression + type: object + type: array + required: + - source + - transformation + type: object + executor: + properties: + serviceAccountName: + type: string + type: object + failFast: + type: boolean + hostAliases: + items: + properties: + hostnames: + items: + type: string + type: array + ip: + type: string + type: object + type: array + http: + properties: + body: + type: string + bodyFrom: + properties: + bytes: + format: byte + type: string + type: object + headers: + items: + properties: + name: + type: string + value: + type: string + valueFrom: + properties: + secretKeyRef: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + required: + - name + type: object + type: array + insecureSkipVerify: + type: boolean + method: + type: string + successCondition: + type: string + timeoutSeconds: + format: int64 + type: integer + url: + type: string + required: + - url + type: object + initContainers: + items: + properties: + args: + items: + type: string + type: array + command: + items: + type: string + type: array + env: + items: + properties: name: type: string - optional: - type: boolean - oss: + value: + type: string + valueFrom: properties: - 
accessKeySecret: + configMapKeyRef: properties: key: type: string @@ -11139,24 +13740,31 @@ spec: required: - key type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: + fieldRef: properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer + apiVersion: + type: string + fieldPath: + type: string + required: + - fieldPath + type: object + resourceFieldRef: + properties: + containerName: + type: string + divisor: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + type: string + required: + - resource type: object - secretKeySecret: + secretKeyRef: properties: key: type: string @@ -11167,137 +13775,745 @@ spec: required: - key type: object - securityToken: + type: object + required: + - name + type: object + type: array + envFrom: + items: + properties: + configMapRef: + properties: + name: type: string - required: - - key + optional: + type: boolean type: object - path: + prefix: type: string - raw: + secretRef: properties: - data: + name: type: string - required: - - data + optional: + type: boolean type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: + type: object + type: array + image: + type: string + imagePullPolicy: + type: string + lifecycle: + properties: + postStart: + properties: + exec: + properties: + command: + items: + type: string + type: array + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + 
type: string + required: + - port + type: object + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + type: object + preStop: + properties: + exec: + properties: + command: + items: type: string + type: array + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + type: object + type: object + livenessProbe: + properties: + exec: + properties: + command: + items: + type: string + type: array + type: object + failureThreshold: + format: int32 + type: integer + grpc: + properties: + port: + format: int32 + type: integer + service: + type: string + required: + - port + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: + properties: name: type: string - optional: - type: boolean + value: + type: string required: - - key + - name + - value type: object - bucket: + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + initialDelaySeconds: + format: int32 + type: integer + periodSeconds: + format: int32 + type: integer + successThreshold: + format: int32 + type: integer + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + terminationGracePeriodSeconds: + format: int64 + type: 
integer + timeoutSeconds: + format: int32 + type: integer + type: object + mirrorVolumeMounts: + type: boolean + name: + type: string + ports: + items: + properties: + containerPort: + format: int32 + type: integer + hostIP: + type: string + hostPort: + format: int32 + type: integer + name: + type: string + protocol: + default: TCP + type: string + required: + - containerPort + type: object + type: array + x-kubernetes-list-map-keys: + - containerPort + - protocol + x-kubernetes-list-type: map + readinessProbe: + properties: + exec: + properties: + command: + items: type: string - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: + type: array + type: object + failureThreshold: + format: int32 + type: integer + grpc: + properties: + port: + format: int32 + type: integer + service: + type: string + required: + - port + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: properties: - enableEncryption: - type: boolean - kmsEncryptionContext: + name: type: string - kmsKeyId: + value: type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object + required: + - name + - value type: object - endpoint: - type: string - insecure: - type: boolean - key: + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + initialDelaySeconds: + format: int32 + type: integer + periodSeconds: + format: int32 + type: integer + successThreshold: + format: int32 + type: integer + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + terminationGracePeriodSeconds: + format: int64 + type: integer + timeoutSeconds: + format: int32 + type: integer + type: 
object + resources: + properties: + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: object + type: object + securityContext: + properties: + allowPrivilegeEscalation: + type: boolean + capabilities: + properties: + add: + items: type: string - region: + type: array + drop: + items: type: string - roleARN: + type: array + type: object + privileged: + type: boolean + procMount: + type: string + readOnlyRootFilesystem: + type: boolean + runAsGroup: + format: int64 + type: integer + runAsNonRoot: + type: boolean + runAsUser: + format: int64 + type: integer + seLinuxOptions: + properties: + level: + type: string + role: + type: string + type: + type: string + user: + type: string + type: object + seccompProfile: + properties: + localhostProfile: + type: string + type: + type: string + required: + - type + type: object + windowsOptions: + properties: + gmsaCredentialSpec: + type: string + gmsaCredentialSpecName: + type: string + hostProcess: + type: boolean + runAsUserName: + type: string + type: object + type: object + startupProbe: + properties: + exec: + properties: + command: + items: type: string - secretKeySecret: + type: array + type: object + failureThreshold: + format: int32 + type: integer + grpc: + properties: + port: + format: int32 + type: integer + service: + type: string + required: + - port + type: object + httpGet: + properties: + host: + type: string + httpHeaders: + items: properties: - key: - type: string name: type: string - optional: - type: boolean + value: + type: string required: - - key + - name + - 
value type: object - useSDKCreds: - type: boolean - type: object - subPath: + type: array + path: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + scheme: + type: string + required: + - port + type: object + initialDelaySeconds: + format: int32 + type: integer + periodSeconds: + format: int32 + type: integer + successThreshold: + format: int32 + type: integer + tcpSocket: + properties: + host: + type: string + port: + anyOf: + - type: integer + - type: string + x-kubernetes-int-or-string: true + required: + - port + type: object + terminationGracePeriodSeconds: + format: int64 + type: integer + timeoutSeconds: + format: int32 + type: integer + type: object + stdin: + type: boolean + stdinOnce: + type: boolean + terminationMessagePath: + type: string + terminationMessagePolicy: + type: string + tty: + type: boolean + volumeDevices: + items: + properties: + devicePath: + type: string + name: type: string required: + - devicePath - name type: object - type: object - transformation: - items: - properties: - expression: - type: string - required: - - expression - type: object - type: array - required: - - source - - transformation - type: object - executor: - properties: - serviceAccountName: - type: string - type: object - failFast: - type: boolean - hostAliases: - items: - properties: - hostnames: + type: array + volumeMounts: items: - type: string + properties: + mountPath: + type: string + mountPropagation: + type: string + name: + type: string + readOnly: + type: boolean + subPath: + type: string + subPathExpr: + type: string + required: + - mountPath + - name + type: object type: array - ip: + workingDir: type: string + required: + - name type: object type: array - http: + inputs: properties: - body: - type: string - headers: + artifacts: items: properties: - name: + archive: + properties: + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + 
type: object + type: object + archiveLogs: + type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object + artifactory: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: + type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - url + type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: type: string - value: + fromExpression: type: string - valueFrom: + gcs: properties: - secretKeyRef: + bucket: + type: string + key: + type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key + type: object + git: + properties: + branch: + type: string + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: + items: + type: string + type: array + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: + type: string + revision: + type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: 
string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo + type: object + globalName: + type: string + hdfs: + properties: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: properties: key: type: string @@ -11308,515 +14524,774 @@ spec: required: - key type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path type: object - required: - - name - type: object - type: array - insecureSkipVerify: - type: boolean - method: - type: string - successCondition: - type: string - timeoutSeconds: - format: int64 - type: integer - url: - type: string - required: - - url - type: object - initContainers: - items: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: 
object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: properties: - key: - type: string name: type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: + value: type: string - optional: - type: boolean required: - - key + - name + - value type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - 
lifecycle: - properties: - postStart: + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer + name: + type: string + optional: + type: boolean + oss: properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: + accessKeySecret: properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: + key: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + name: type: string + optional: + type: boolean required: - - port + - key type: object - tcpSocket: + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: properties: - host: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true + name: + type: string + optional: + type: boolean required: - - port + - key type: object + securityToken: + type: string + required: + - key type: object - preStop: + path: + type: string + raw: properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: + data: + type: string + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: + key: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + name: type: string + optional: + type: boolean required: - - port + - key type: 
object - tcpSocket: + bucket: + type: string + createBucketIfNotPresent: properties: - host: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: + endpoint: type: string - required: - - port - type: object - httpGet: - properties: - host: + insecure: + type: boolean + key: type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: + region: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + roleARN: type: string - required: - - port + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + useSDKCreds: + type: boolean type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: + subPath: + type: string + required: + - name + type: object + type: array + parameters: + items: + properties: + default: + type: string + description: + type: string + enum: + items: + type: string + type: array + globalName: + type: string + name: + type: string + value: + type: string + valueFrom: properties: - host: + configMapKeyRef: + properties: + key: 
+ type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + default: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port + event: + type: string + expression: + type: string + jqFilter: + type: string + jsonPath: + type: string + parameter: + type: string + path: + type: string + supplied: + type: object type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer + required: + - name type: object - mirrorVolumeMounts: - type: boolean - name: - type: string - ports: - items: + type: array + type: object + memoize: + properties: + cache: + properties: + configMap: properties: - containerPort: - format: int32 - type: integer - hostIP: + key: type: string - hostPort: - format: int32 - type: integer name: type: string - protocol: - default: TCP - type: string + optional: + type: boolean required: - - containerPort + - key type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: + required: + - configMap + type: object + key: + type: string + maxAge: + type: string + required: + - cache + - key + - maxAge + type: object + metadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + metrics: + properties: + prometheus: + items: properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: + counter: properties: - port: - format: int32 - type: integer - service: + value: type: string required: - - port + - value type: object - httpGet: + gauge: properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - 
type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: + realtime: + type: boolean + value: type: string required: - - port + - realtime + - value type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: + help: + type: string + histogram: properties: - host: + buckets: + items: + type: number + type: array + value: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true + - buckets + - value type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: + labels: + items: + properties: + key: type: string - type: array - drop: - items: + value: type: string - type: array - type: object - privileged: - type: boolean - procMount: + required: + - key + - value + type: object + type: array + name: type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: + when: + type: string + required: + - help + - name + type: object + type: array + required: + - prometheus + type: object + name: + type: string + 
nodeSelector: + additionalProperties: + type: string + type: object + outputs: + properties: + artifacts: + items: + properties: + archive: + properties: + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object + type: object + archiveLogs: type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: + artifactGC: properties: - level: - type: string - role: - type: string - type: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: type: string - user: + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never type: string type: object - seccompProfile: + artifactory: properties: - localhostProfile: - type: string - type: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object required: - - type + - url type: object - windowsOptions: + azure: properties: - gmsaCredentialSpec: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: type: string - gmsaCredentialSpecName: + container: type: string - hostProcess: + endpoint: + type: string + useSDKCreds: type: boolean - runAsUserName: + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: + type: string + fromExpression: + type: string + gcs: + properties: + bucket: + type: string + key: type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key type: object - type: 
object - startupProbe: - properties: - exec: + git: properties: - command: + branch: + type: string + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: items: type: string type: array + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: + type: string + revision: + type: string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo type: object - failureThreshold: - format: int32 - type: integer - grpc: + globalName: + type: string + hdfs: properties: - port: - format: int32 - type: integer - service: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: type: string required: - - port + - path type: object - httpGet: + http: properties: - host: - type: string - httpHeaders: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + 
type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: items: properties: name: @@ -11828,117 +15303,21 @@ spec: - value type: object type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: + url: type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true required: - - port + - url type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: + mode: format: int32 type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - 
devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - inputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: + name: + type: string + optional: type: boolean - artifactory: + oss: properties: - passwordSecret: + accessKeySecret: properties: key: type: string @@ -11949,9 +15328,24 @@ spec: required: - key type: object - url: + bucket: type: string - usernameSecret: + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + key: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: properties: key: type: string @@ -11962,20 +15356,25 @@ spec: required: - key type: object + securityToken: + type: string required: - - url + - key type: object - from: - type: string - fromExpression: + path: type: string - gcs: + raw: properties: - bucket: - type: string - key: + data: type: string - serviceAccountKeySecret: + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: properties: key: type: string @@ -11986,23 +15385,44 @@ spec: required: - key type: object - required: - - key - type: object - git: - properties: - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: + bucket: + 
type: string + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + endpoint: + type: string + insecure: type: boolean - passwordSecret: + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: properties: key: type: string @@ -12013,11 +15433,37 @@ spec: required: - key type: object - repo: - type: string - revision: - type: string - sshPrivateKeySecret: + useSDKCreds: + type: boolean + type: object + subPath: + type: string + required: + - name + type: object + type: array + exitCode: + type: string + parameters: + items: + properties: + default: + type: string + description: + type: string + enum: + items: + type: string + type: array + globalName: + type: string + name: + type: string + value: + type: string + valueFrom: + properties: + configMapKeyRef: properties: key: type: string @@ -12028,800 +15474,549 @@ spec: required: - key type: object - usernameSecret: - properties: - key: + default: + type: string + event: + type: string + expression: + type: string + jqFilter: + type: string + jsonPath: + type: string + parameter: + type: string + path: + type: string + supplied: + type: object + type: object + required: + - name + type: object + type: array + result: + type: string + type: object + parallelism: + format: int64 + type: integer + plugin: + type: object + podSpecPatch: + type: string + priority: + format: int32 + type: integer + priorityClassName: + type: string + resource: + properties: + action: + type: string + failureCondition: + type: string + flags: + items: + type: string + type: array + manifest: + type: string + manifestFrom: + properties: + artifact: + properties: + 
archive: + properties: + none: + type: object + tar: + properties: + compressionLevel: + format: int32 + type: integer + type: object + zip: + type: object + type: object + archiveLogs: + type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object + artifactory: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + url: + type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - url + type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean + from: + type: string + fromExpression: + type: string + gcs: + properties: + bucket: + type: string + key: + type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - key + type: object + git: + properties: + branch: + type: string + depth: + format: int64 + type: integer + disableSubmodules: + type: boolean + fetch: + items: type: string - name: + type: array + insecureIgnoreHostKey: + type: boolean + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repo: + type: string + revision: + type: 
string + singleBranch: + type: boolean + sshPrivateKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + required: + - repo + type: object + globalName: + type: string + hdfs: + properties: + addresses: + items: type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: + type: array + force: + type: boolean + hdfsUser: type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - headers: - items: + krbCCacheSecret: properties: + key: + type: string name: type: string - value: + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: type: string + optional: + type: boolean required: - - name - - value + - key type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object 
- bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + path: + type: string + required: + - path + type: object + http: + properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + 
type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object + headers: + items: properties: - key: - type: string name: type: string - optional: - type: boolean + value: + type: string required: - - key + - name + - value type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - memoize: - 
properties: - cache: - properties: - configMap: - properties: - key: - type: string + type: array + url: + type: string + required: + - url + type: object + mode: + format: int32 + type: integer name: type: string optional: type: boolean - required: - - key - type: object - required: - - configMap - type: object - key: - type: string - maxAge: - type: string - required: - - cache - - key - - maxAge - type: object - metadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - metrics: - properties: - prometheus: - items: - properties: - counter: - properties: - value: - type: string - required: - - value - type: object - gauge: - properties: - realtime: - type: boolean - value: - type: string - required: - - realtime - - value - type: object - help: - type: string - histogram: - properties: - buckets: - items: - type: number - type: array - value: - type: string - required: - - buckets - - value - type: object - labels: - items: + oss: properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string key: type: string - value: + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: type: string required: - key - - value type: object - type: array - name: - type: string - when: - type: string - required: - - help - - name - type: object - type: array - required: - - prometheus - type: object - name: - type: string - nodeSelector: - additionalProperties: - type: string - type: object - outputs: - properties: - 
artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: + path: + type: string + raw: + properties: + data: type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: + required: + - data + type: object + recurseMode: + type: boolean + s3: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: type: string - type: array - 
force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - headers: - items: + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + encryptionOptions: + properties: + enableEncryption: + type: boolean + kmsEncryptionContext: + type: string + kmsKeyId: + type: string + serverSideCustomerKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + endpoint: + type: string + insecure: + type: boolean + key: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: properties: - name: + key: type: string - value: + name: type: string + optional: + type: boolean required: - - name - - value + - key type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - 
properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - exitCode: - type: string - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: + useSDKCreds: + type: boolean + type: object + subPath: type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - 
name - type: object - type: array - result: - type: string - type: object - parallelism: - format: int64 - type: integer - plugin: - type: object - podSpecPatch: - type: string - priority: - format: int32 - type: integer - priorityClassName: - type: string - resource: - properties: - action: - type: string - failureCondition: - type: string - flags: - items: - type: string - type: array - manifest: - type: string + required: + - name + type: object + required: + - artifact + type: object mergeStrategy: type: string setOwnerReference: diff --git a/manifests/base/crds/full/kustomization.yaml b/manifests/base/crds/full/kustomization.yaml index a59214c5bb7d..6a479e4de9e7 100644 --- a/manifests/base/crds/full/kustomization.yaml +++ b/manifests/base/crds/full/kustomization.yaml @@ -9,3 +9,4 @@ resources: - argoproj.io_workfloweventbindings.yaml - argoproj.io_workflowtasksets.yaml - argoproj.io_workflowtaskresults.yaml +- argoproj.io_workflowartifactgctasks.yaml diff --git a/manifests/base/crds/minimal/argoproj.io_workflowartifactgctasks.yaml b/manifests/base/crds/minimal/argoproj.io_workflowartifactgctasks.yaml new file mode 100644 index 000000000000..35b1e2eab548 --- /dev/null +++ b/manifests/base/crds/minimal/argoproj.io_workflowartifactgctasks.yaml @@ -0,0 +1,41 @@ +apiVersion: apiextensions.k8s.io/v1 +kind: CustomResourceDefinition +metadata: + name: workflowartifactgctasks.argoproj.io +spec: + group: argoproj.io + names: + kind: WorkflowArtifactGCTask + listKind: WorkflowArtifactGCTaskList + plural: workflowartifactgctasks + shortNames: + - wfat + singular: workflowartifactgctask + scope: Namespaced + versions: + - name: v1alpha1 + schema: + openAPIV3Schema: + properties: + apiVersion: + type: string + kind: + type: string + metadata: + type: object + spec: + type: object + x-kubernetes-map-type: atomic + x-kubernetes-preserve-unknown-fields: true + status: + type: object + x-kubernetes-map-type: atomic + x-kubernetes-preserve-unknown-fields: true + required: + - 
metadata + - spec + type: object + served: true + storage: true + subresources: + status: {} diff --git a/manifests/base/crds/minimal/argoproj.io_workflows.yaml b/manifests/base/crds/minimal/argoproj.io_workflows.yaml index 6e646adc7d4a..7a5b598d5929 100644 --- a/manifests/base/crds/minimal/argoproj.io_workflows.yaml +++ b/manifests/base/crds/minimal/argoproj.io_workflows.yaml @@ -23,6 +23,11 @@ spec: jsonPath: .status.startedAt name: Age type: date + - description: Human readable message indicating details about why the workflow + is in this condition. + jsonPath: .status.message + name: Message + type: string name: v1alpha1 schema: openAPIV3Schema: diff --git a/manifests/base/crds/minimal/argoproj.io_workflowtaskresults.yaml b/manifests/base/crds/minimal/argoproj.io_workflowtaskresults.yaml index 84d60f3e7ea6..e76677f7295d 100644 --- a/manifests/base/crds/minimal/argoproj.io_workflowtaskresults.yaml +++ b/manifests/base/crds/minimal/argoproj.io_workflowtaskresults.yaml @@ -43,6 +43,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -72,6 +95,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -98,6 +149,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: 
int64 type: integer @@ -124,6 +177,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -207,6 +262,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: diff --git a/manifests/base/crds/minimal/kustomization.yaml b/manifests/base/crds/minimal/kustomization.yaml index a59214c5bb7d..6a479e4de9e7 100644 --- a/manifests/base/crds/minimal/kustomization.yaml +++ b/manifests/base/crds/minimal/kustomization.yaml @@ -9,3 +9,4 @@ resources: - argoproj.io_workfloweventbindings.yaml - argoproj.io_workflowtasksets.yaml - argoproj.io_workflowtaskresults.yaml +- 
argoproj.io_workflowartifactgctasks.yaml diff --git a/manifests/base/workflow-controller/kustomization.yaml b/manifests/base/workflow-controller/kustomization.yaml index 0168142a3d0d..a0192952c890 100644 --- a/manifests/base/workflow-controller/kustomization.yaml +++ b/manifests/base/workflow-controller/kustomization.yaml @@ -5,5 +5,4 @@ resources: - workflow-controller-configmap.yaml - workflow-controller-deployment.yaml - workflow-controller-sa.yaml -- workflow-controller-metrics-service.yaml - workflow-controller-priorityclass.yaml diff --git a/manifests/base/workflow-controller/workflow-controller-metrics-service.yaml b/manifests/base/workflow-controller/workflow-controller-metrics-service.yaml deleted file mode 100644 index d040adaa08a9..000000000000 --- a/manifests/base/workflow-controller/workflow-controller-metrics-service.yaml +++ /dev/null @@ -1,14 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: workflow-controller-metrics - labels: - app: workflow-controller -spec: - selector: - app: workflow-controller - ports: - - name: metrics - port: 9090 - targetPort: 9090 - protocol: TCP diff --git a/manifests/cluster-install/argo-server-rbac/argo-server-clusterole.yaml b/manifests/cluster-install/argo-server-rbac/argo-server-clusterole.yaml index 74e7849f901a..0882c9a7b9c9 100644 --- a/manifests/cluster-install/argo-server-rbac/argo-server-clusterole.yaml +++ b/manifests/cluster-install/argo-server-rbac/argo-server-clusterole.yaml @@ -18,8 +18,6 @@ rules: verbs: - get - create - - list - - watch - apiGroups: - "" resources: diff --git a/manifests/cluster-install/argo-server-rbac/argo-server-clusterolebinding.yaml b/manifests/cluster-install/argo-server-rbac/argo-server-clusterolebinding.yaml index 6d7ac27ff5e0..7abedd7b4fd3 100644 --- a/manifests/cluster-install/argo-server-rbac/argo-server-clusterolebinding.yaml +++ b/manifests/cluster-install/argo-server-rbac/argo-server-clusterolebinding.yaml @@ -9,4 +9,3 @@ roleRef: subjects: - kind: ServiceAccount 
name: argo-server - namespace: argo diff --git a/manifests/cluster-install/kustomization.yaml b/manifests/cluster-install/kustomization.yaml index a1698bd2ac72..986293cd64b0 100644 --- a/manifests/cluster-install/kustomization.yaml +++ b/manifests/cluster-install/kustomization.yaml @@ -5,3 +5,5 @@ resources: - ../base - ./workflow-controller-rbac - ./argo-server-rbac + +namespace: argo \ No newline at end of file diff --git a/manifests/cluster-install/workflow-controller-rbac/workflow-aggregate-roles.yaml b/manifests/cluster-install/workflow-controller-rbac/workflow-aggregate-roles.yaml index 9842cb07fe11..5619492b1a65 100644 --- a/manifests/cluster-install/workflow-controller-rbac/workflow-aggregate-roles.yaml +++ b/manifests/cluster-install/workflow-controller-rbac/workflow-aggregate-roles.yaml @@ -18,6 +18,8 @@ rules: - cronworkflows/finalizers - clusterworkflowtemplates - clusterworkflowtemplates/finalizers + - workflowtaskresults + - workflowtaskresults/finalizers verbs: - get - list @@ -44,6 +46,8 @@ rules: - cronworkflows/finalizers - clusterworkflowtemplates - clusterworkflowtemplates/finalizers + - workflowtaskresults + - workflowtaskresults/finalizers verbs: - create - delete @@ -77,6 +81,8 @@ rules: - clusterworkflowtemplates/finalizers - workflowtasksets - workflowtasksets/finalizers + - workflowtaskresults + - workflowtaskresults/finalizers verbs: - create - delete diff --git a/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrole.yaml b/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrole.yaml index 3d6b5a39539d..b547e0097382 100644 --- a/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrole.yaml +++ b/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrole.yaml @@ -41,6 +41,7 @@ rules: - workflows/finalizers - workflowtasksets - workflowtasksets/finalizers + - workflowartifactgctasks verbs: - get - list diff --git 
a/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrolebinding.yaml b/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrolebinding.yaml index b4e54ea67c90..f1985fabf3bf 100644 --- a/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrolebinding.yaml +++ b/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrolebinding.yaml @@ -9,4 +9,3 @@ roleRef: subjects: - kind: ServiceAccount name: argo - namespace: argo diff --git a/manifests/install.yaml b/manifests/install.yaml deleted file mode 100644 index 9ef46c4c6f66..000000000000 --- a/manifests/install.yaml +++ /dev/null @@ -1,1131 +0,0 @@ -# This is an auto-generated file. DO NOT EDIT -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - name: clusterworkflowtemplates.argoproj.io -spec: - group: argoproj.io - names: - kind: ClusterWorkflowTemplate - listKind: ClusterWorkflowTemplateList - plural: clusterworkflowtemplates - shortNames: - - clusterwftmpl - - cwft - singular: clusterworkflowtemplate - scope: Cluster - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true ---- -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - name: cronworkflows.argoproj.io -spec: - group: argoproj.io - names: - kind: CronWorkflow - listKind: CronWorkflowList - plural: cronworkflows - shortNames: - - cwf - - cronwf - singular: cronworkflow - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - 
x-kubernetes-preserve-unknown-fields: true - status: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true ---- -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - name: workfloweventbindings.argoproj.io -spec: - group: argoproj.io - names: - kind: WorkflowEventBinding - listKind: WorkflowEventBindingList - plural: workfloweventbindings - shortNames: - - wfeb - singular: workfloweventbinding - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true ---- -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - name: workflows.argoproj.io -spec: - group: argoproj.io - names: - kind: Workflow - listKind: WorkflowList - plural: workflows - shortNames: - - wf - singular: workflow - scope: Namespaced - versions: - - additionalPrinterColumns: - - description: Status of the workflow - jsonPath: .status.phase - name: Status - type: string - - description: When the workflow was started - format: date-time - jsonPath: .status.startedAt - name: Age - type: date - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - status: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true - subresources: {} ---- -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - name: workflowtaskresults.argoproj.io 
-spec: - group: argoproj.io - names: - kind: WorkflowTaskResult - listKind: WorkflowTaskResultList - plural: workflowtaskresults - singular: workflowtaskresult - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - message: - type: string - metadata: - type: object - outputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - 
globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - 
bucket: - type: string - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - exitCode: - type: string - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - result: - type: string - type: object - phase: - type: string - progress: - type: string - required: - - metadata - type: object - served: true - storage: true ---- -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - name: workflowtasksets.argoproj.io -spec: - group: argoproj.io - names: - kind: WorkflowTaskSet - listKind: WorkflowTaskSetList - plural: workflowtasksets - shortNames: - - wfts - singular: workflowtaskset - scope: Namespaced - versions: - - name: 
v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - status: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true - subresources: - status: {} ---- -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - name: workflowtemplates.argoproj.io -spec: - group: argoproj.io - names: - kind: WorkflowTemplate - listKind: WorkflowTemplateList - plural: workflowtemplates - shortNames: - - wftmpl - singular: workflowtemplate - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true ---- -apiVersion: v1 -kind: ServiceAccount -metadata: - name: argo ---- -apiVersion: v1 -kind: ServiceAccount -metadata: - name: argo-server ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: - name: argo-role -rules: -- apiGroups: - - coordination.k8s.io - resources: - - leases - verbs: - - create - - get - - update -- apiGroups: - - "" - resources: - - secrets - verbs: - - get ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: - labels: - rbac.authorization.k8s.io/aggregate-to-admin: "true" - name: argo-aggregate-to-admin -rules: -- apiGroups: - - argoproj.io - resources: - - workflows - - workflows/finalizers - - workfloweventbindings - - workfloweventbindings/finalizers - - workflowtemplates - - workflowtemplates/finalizers - - cronworkflows - - cronworkflows/finalizers - - clusterworkflowtemplates - - clusterworkflowtemplates/finalizers - - 
workflowtasksets - - workflowtasksets/finalizers - verbs: - - create - - delete - - deletecollection - - get - - list - - patch - - update - - watch ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: - labels: - rbac.authorization.k8s.io/aggregate-to-edit: "true" - name: argo-aggregate-to-edit -rules: -- apiGroups: - - argoproj.io - resources: - - workflows - - workflows/finalizers - - workfloweventbindings - - workfloweventbindings/finalizers - - workflowtemplates - - workflowtemplates/finalizers - - cronworkflows - - cronworkflows/finalizers - - clusterworkflowtemplates - - clusterworkflowtemplates/finalizers - verbs: - - create - - delete - - deletecollection - - get - - list - - patch - - update - - watch ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: - labels: - rbac.authorization.k8s.io/aggregate-to-view: "true" - name: argo-aggregate-to-view -rules: -- apiGroups: - - argoproj.io - resources: - - workflows - - workflows/finalizers - - workfloweventbindings - - workfloweventbindings/finalizers - - workflowtemplates - - workflowtemplates/finalizers - - cronworkflows - - cronworkflows/finalizers - - clusterworkflowtemplates - - clusterworkflowtemplates/finalizers - verbs: - - get - - list - - watch ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: - name: argo-cluster-role -rules: -- apiGroups: - - "" - resources: - - pods - - pods/exec - verbs: - - create - - get - - list - - watch - - update - - patch - - delete -- apiGroups: - - "" - resources: - - configmaps - verbs: - - get - - watch - - list -- apiGroups: - - "" - resources: - - persistentvolumeclaims - - persistentvolumeclaims/finalizers - verbs: - - create - - update - - delete - - get -- apiGroups: - - argoproj.io - resources: - - workflows - - workflows/finalizers - - workflowtasksets - - workflowtasksets/finalizers - verbs: - - get - - list - - watch - - update - - patch - - delete - - create -- apiGroups: - - 
argoproj.io - resources: - - workflowtemplates - - workflowtemplates/finalizers - - clusterworkflowtemplates - - clusterworkflowtemplates/finalizers - verbs: - - get - - list - - watch -- apiGroups: - - argoproj.io - resources: - - workflowtaskresults - verbs: - - list - - watch - - deletecollection -- apiGroups: - - "" - resources: - - serviceaccounts - verbs: - - get - - list -- apiGroups: - - argoproj.io - resources: - - cronworkflows - - cronworkflows/finalizers - verbs: - - get - - list - - watch - - update - - patch - - delete -- apiGroups: - - "" - resources: - - events - verbs: - - create - - patch -- apiGroups: - - policy - resources: - - poddisruptionbudgets - verbs: - - create - - get - - delete ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: - name: argo-server-cluster-role -rules: -- apiGroups: - - "" - resources: - - configmaps - verbs: - - get - - watch - - list -- apiGroups: - - "" - resources: - - secrets - verbs: - - get - - create - - list - - watch -- apiGroups: - - "" - resources: - - pods - - pods/exec - - pods/log - verbs: - - get - - list - - watch - - delete -- apiGroups: - - "" - resources: - - events - verbs: - - watch - - create - - patch -- apiGroups: - - "" - resources: - - serviceaccounts - verbs: - - get - - list - - watch -- apiGroups: - - argoproj.io - resources: - - eventsources - - sensors - - workflows - - workfloweventbindings - - workflowtemplates - - cronworkflows - - clusterworkflowtemplates - verbs: - - create - - get - - list - - watch - - update - - patch - - delete ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - name: argo-binding -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: argo-role -subjects: -- kind: ServiceAccount - name: argo ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRoleBinding -metadata: - name: argo-binding -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: ClusterRole - name: argo-cluster-role -subjects: 
-- kind: ServiceAccount - name: argo - namespace: argo ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRoleBinding -metadata: - name: argo-server-binding -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: ClusterRole - name: argo-server-cluster-role -subjects: -- kind: ServiceAccount - name: argo-server - namespace: argo ---- -apiVersion: v1 -kind: ConfigMap -metadata: - name: workflow-controller-configmap ---- -apiVersion: v1 -kind: Service -metadata: - name: argo-server -spec: - ports: - - name: web - port: 2746 - targetPort: 2746 - selector: - app: argo-server ---- -apiVersion: v1 -kind: Service -metadata: - labels: - app: workflow-controller - name: workflow-controller-metrics -spec: - ports: - - name: metrics - port: 9090 - protocol: TCP - targetPort: 9090 - selector: - app: workflow-controller ---- -apiVersion: scheduling.k8s.io/v1 -kind: PriorityClass -metadata: - name: workflow-controller -value: 1000000 ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: argo-server -spec: - selector: - matchLabels: - app: argo-server - template: - metadata: - labels: - app: argo-server - spec: - containers: - - args: - - server - env: [] - image: quay.io/argoproj/argocli:latest - name: argo-server - ports: - - containerPort: 2746 - name: web - readinessProbe: - httpGet: - path: / - port: 2746 - scheme: HTTPS - initialDelaySeconds: 10 - periodSeconds: 20 - securityContext: - capabilities: - drop: - - ALL - volumeMounts: - - mountPath: /tmp - name: tmp - nodeSelector: - kubernetes.io/os: linux - securityContext: - runAsNonRoot: true - serviceAccountName: argo-server - volumes: - - emptyDir: {} - name: tmp ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: workflow-controller -spec: - selector: - matchLabels: - app: workflow-controller - template: - metadata: - labels: - app: workflow-controller - spec: - containers: - - args: [] - command: - - workflow-controller - env: - - name: LEADER_ELECTION_IDENTITY - valueFrom: - fieldRef: - 
apiVersion: v1 - fieldPath: metadata.name - image: quay.io/argoproj/workflow-controller:latest - livenessProbe: - failureThreshold: 3 - httpGet: - path: /healthz - port: 6060 - initialDelaySeconds: 90 - periodSeconds: 60 - timeoutSeconds: 30 - name: workflow-controller - ports: - - containerPort: 9090 - name: metrics - - containerPort: 6060 - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - readOnlyRootFilesystem: true - runAsNonRoot: true - nodeSelector: - kubernetes.io/os: linux - priorityClassName: workflow-controller - securityContext: - runAsNonRoot: true - serviceAccountName: argo diff --git a/manifests/namespace-install.yaml b/manifests/namespace-install.yaml deleted file mode 100644 index e1169edb64fa..000000000000 --- a/manifests/namespace-install.yaml +++ /dev/null @@ -1,1031 +0,0 @@ -# This is an auto-generated file. DO NOT EDIT -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - name: clusterworkflowtemplates.argoproj.io -spec: - group: argoproj.io - names: - kind: ClusterWorkflowTemplate - listKind: ClusterWorkflowTemplateList - plural: clusterworkflowtemplates - shortNames: - - clusterwftmpl - - cwft - singular: clusterworkflowtemplate - scope: Cluster - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true ---- -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - name: cronworkflows.argoproj.io -spec: - group: argoproj.io - names: - kind: CronWorkflow - listKind: CronWorkflowList - plural: cronworkflows - shortNames: - - cwf - - cronwf - singular: cronworkflow - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - 
kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - status: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true ---- -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - name: workfloweventbindings.argoproj.io -spec: - group: argoproj.io - names: - kind: WorkflowEventBinding - listKind: WorkflowEventBindingList - plural: workfloweventbindings - shortNames: - - wfeb - singular: workfloweventbinding - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true ---- -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - name: workflows.argoproj.io -spec: - group: argoproj.io - names: - kind: Workflow - listKind: WorkflowList - plural: workflows - shortNames: - - wf - singular: workflow - scope: Namespaced - versions: - - additionalPrinterColumns: - - description: Status of the workflow - jsonPath: .status.phase - name: Status - type: string - - description: When the workflow was started - format: date-time - jsonPath: .status.startedAt - name: Age - type: date - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - status: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true - subresources: {} ---- -apiVersion: 
apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - name: workflowtaskresults.argoproj.io -spec: - group: argoproj.io - names: - kind: WorkflowTaskResult - listKind: WorkflowTaskResultList - plural: workflowtaskresults - singular: workflowtaskresult - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - message: - type: string - metadata: - type: object - outputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: 
string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - 
key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - exitCode: - type: string - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - result: - type: string - type: object - phase: - type: string - progress: - type: string - required: - - metadata - type: object - served: true - storage: true ---- -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - name: workflowtasksets.argoproj.io -spec: - group: argoproj.io - names: - kind: WorkflowTaskSet - listKind: WorkflowTaskSetList - plural: 
workflowtasksets - shortNames: - - wfts - singular: workflowtaskset - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - status: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true - subresources: - status: {} ---- -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - name: workflowtemplates.argoproj.io -spec: - group: argoproj.io - names: - kind: WorkflowTemplate - listKind: WorkflowTemplateList - plural: workflowtemplates - shortNames: - - wftmpl - singular: workflowtemplate - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true ---- -apiVersion: v1 -kind: ServiceAccount -metadata: - name: argo ---- -apiVersion: v1 -kind: ServiceAccount -metadata: - name: argo-server ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: - name: argo-role -rules: -- apiGroups: - - coordination.k8s.io - resources: - - leases - verbs: - - create - - get - - update -- apiGroups: - - "" - resources: - - pods - - pods/exec - verbs: - - create - - get - - list - - watch - - update - - patch - - delete -- apiGroups: - - "" - resources: - - configmaps - verbs: - - get - - watch - - list -- apiGroups: - - "" - resources: - - persistentvolumeclaims - - persistentvolumeclaims/finalizers - verbs: - - create - - update - - delete - - get -- apiGroups: - - argoproj.io - resources: - - workflows - - workflows/finalizers - - 
workflowtasksets - - workflowtasksets/finalizers - verbs: - - get - - list - - watch - - update - - patch - - delete - - create -- apiGroups: - - argoproj.io - resources: - - workflowtemplates - - workflowtemplates/finalizers - verbs: - - get - - list - - watch -- apiGroups: - - "" - resources: - - serviceaccounts - verbs: - - get - - list -- apiGroups: - - argoproj.io - resources: - - workflowtaskresults - verbs: - - list - - watch - - deletecollection -- apiGroups: - - "" - resources: - - serviceaccounts - verbs: - - get - - list -- apiGroups: - - "" - resources: - - secrets - verbs: - - get -- apiGroups: - - argoproj.io - resources: - - cronworkflows - - cronworkflows/finalizers - verbs: - - get - - list - - watch - - update - - patch - - delete -- apiGroups: - - "" - resources: - - events - verbs: - - create - - patch -- apiGroups: - - policy - resources: - - poddisruptionbudgets - verbs: - - create - - get - - delete ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: - name: argo-server-role -rules: -- apiGroups: - - "" - resources: - - configmaps - verbs: - - get - - watch - - list -- apiGroups: - - "" - resources: - - secrets - verbs: - - get - - create - - list - - watch -- apiGroups: - - "" - resources: - - pods - - pods/exec - - pods/log - verbs: - - get - - list - - watch - - delete -- apiGroups: - - "" - resources: - - events - verbs: - - watch - - create - - patch -- apiGroups: - - "" - resources: - - serviceaccounts - verbs: - - get - - list - - watch -- apiGroups: - - argoproj.io - resources: - - eventsources - - sensors - - workflows - - workfloweventbindings - - workflowtemplates - - cronworkflows - - cronworkflows/finalizers - verbs: - - create - - get - - list - - watch - - update - - patch - - delete ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - name: argo-binding -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: argo-role -subjects: -- kind: ServiceAccount - name: argo ---- 
-apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - name: argo-server-binding -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: argo-server-role -subjects: -- kind: ServiceAccount - name: argo-server ---- -apiVersion: v1 -kind: ConfigMap -metadata: - name: workflow-controller-configmap ---- -apiVersion: v1 -kind: Service -metadata: - name: argo-server -spec: - ports: - - name: web - port: 2746 - targetPort: 2746 - selector: - app: argo-server ---- -apiVersion: v1 -kind: Service -metadata: - labels: - app: workflow-controller - name: workflow-controller-metrics -spec: - ports: - - name: metrics - port: 9090 - protocol: TCP - targetPort: 9090 - selector: - app: workflow-controller ---- -apiVersion: scheduling.k8s.io/v1 -kind: PriorityClass -metadata: - name: workflow-controller -value: 1000000 ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: argo-server -spec: - selector: - matchLabels: - app: argo-server - template: - metadata: - labels: - app: argo-server - spec: - containers: - - args: - - server - - --namespaced - env: [] - image: quay.io/argoproj/argocli:latest - name: argo-server - ports: - - containerPort: 2746 - name: web - readinessProbe: - httpGet: - path: / - port: 2746 - scheme: HTTPS - initialDelaySeconds: 10 - periodSeconds: 20 - securityContext: - capabilities: - drop: - - ALL - volumeMounts: - - mountPath: /tmp - name: tmp - nodeSelector: - kubernetes.io/os: linux - securityContext: - runAsNonRoot: true - serviceAccountName: argo-server - volumes: - - emptyDir: {} - name: tmp ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: workflow-controller -spec: - selector: - matchLabels: - app: workflow-controller - template: - metadata: - labels: - app: workflow-controller - spec: - containers: - - args: - - --namespaced - command: - - workflow-controller - env: - - name: LEADER_ELECTION_IDENTITY - valueFrom: - fieldRef: - apiVersion: v1 - fieldPath: metadata.name - image: 
quay.io/argoproj/workflow-controller:latest - livenessProbe: - failureThreshold: 3 - httpGet: - path: /healthz - port: 6060 - initialDelaySeconds: 90 - periodSeconds: 60 - timeoutSeconds: 30 - name: workflow-controller - ports: - - containerPort: 9090 - name: metrics - - containerPort: 6060 - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - readOnlyRootFilesystem: true - runAsNonRoot: true - nodeSelector: - kubernetes.io/os: linux - priorityClassName: workflow-controller - securityContext: - runAsNonRoot: true - serviceAccountName: argo diff --git a/manifests/namespace-install/argo-server-rbac/argo-server-role.yaml b/manifests/namespace-install/argo-server-rbac/argo-server-role.yaml index 0470b138d9d3..314177a3ef8e 100644 --- a/manifests/namespace-install/argo-server-rbac/argo-server-role.yaml +++ b/manifests/namespace-install/argo-server-rbac/argo-server-role.yaml @@ -18,8 +18,6 @@ rules: verbs: - get - create - - list - - watch - apiGroups: - "" resources: diff --git a/manifests/namespace-install/workflow-controller-rbac/workflow-controller-role.yaml b/manifests/namespace-install/workflow-controller-rbac/workflow-controller-role.yaml index c467ae17fbed..4e8df30fa5f2 100644 --- a/manifests/namespace-install/workflow-controller-rbac/workflow-controller-role.yaml +++ b/manifests/namespace-install/workflow-controller-rbac/workflow-controller-role.yaml @@ -49,6 +49,7 @@ rules: - workflows/finalizers - workflowtasksets - workflowtasksets/finalizers + - workflowartifactgctasks verbs: - get - list @@ -66,13 +67,6 @@ rules: - get - list - watch - - apiGroups: - - "" - resources: - - serviceaccounts - verbs: - - get - - list - apiGroups: - argoproj.io resources: diff --git a/manifests/quick-start-minimal.yaml b/manifests/quick-start-minimal.yaml index 23043187203a..5fe8a45864e5 100644 --- a/manifests/quick-start-minimal.yaml +++ b/manifests/quick-start-minimal.yaml @@ -79,6 +79,48 @@ spec: --- apiVersion: apiextensions.k8s.io/v1 kind: 
CustomResourceDefinition +metadata: + name: workflowartifactgctasks.argoproj.io +spec: + group: argoproj.io + names: + kind: WorkflowArtifactGCTask + listKind: WorkflowArtifactGCTaskList + plural: workflowartifactgctasks + shortNames: + - wfat + singular: workflowartifactgctask + scope: Namespaced + versions: + - name: v1alpha1 + schema: + openAPIV3Schema: + properties: + apiVersion: + type: string + kind: + type: string + metadata: + type: object + spec: + type: object + x-kubernetes-map-type: atomic + x-kubernetes-preserve-unknown-fields: true + status: + type: object + x-kubernetes-map-type: atomic + x-kubernetes-preserve-unknown-fields: true + required: + - metadata + - spec + type: object + served: true + storage: true + subresources: + status: {} +--- +apiVersion: apiextensions.k8s.io/v1 +kind: CustomResourceDefinition metadata: name: workfloweventbindings.argoproj.io spec: @@ -138,6 +180,11 @@ spec: jsonPath: .status.startedAt name: Age type: date + - description: Human readable message indicating details about why the workflow + is in this condition. 
+ jsonPath: .status.message + name: Message + type: string name: v1alpha1 schema: openAPIV3Schema: @@ -209,6 +256,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -238,6 +308,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -264,6 +362,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -290,6 +390,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -373,6 +475,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + 
oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -758,6 +964,7 @@ rules: - workflows/finalizers - workflowtasksets - workflowtasksets/finalizers + - workflowartifactgctasks verbs: - get - list @@ -775,13 +982,6 @@ rules: - get - list - watch -- apiGroups: - - "" - resources: - - serviceaccounts - verbs: - - get - - list - apiGroups: - argoproj.io resources: @@ -851,8 +1051,6 @@ rules: verbs: - get - create - - list - - watch - apiGroups: - "" resources: @@ -901,6 +1099,28 @@ rules: --- apiVersion: rbac.authorization.k8s.io/v1 kind: Role +metadata: + annotations: + workflows.argoproj.io/description: | + This is the minimum recommended permissions needed if you want to use artifact GC. 
+ name: artifactgc +rules: +- apiGroups: + - argoproj.io + resources: + - workflowartifactgctasks + verbs: + - list + - watch +- apiGroups: + - argoproj.io + resources: + - workflowartifactgctasks/status + verbs: + - patch +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role metadata: annotations: workflows.argoproj.io/description: | @@ -1045,6 +1265,18 @@ subjects: --- apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding +metadata: + name: artifactgc-default +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: artifactgc +subjects: +- kind: ServiceAccount + name: default +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding metadata: name: executor-default roleRef: @@ -1164,29 +1396,14 @@ data: secretKeySecret: name: my-minio-cred key: secretkey - containerRuntimeExecutors: | - - name: emissary - selector: - matchLabels: - workflows.argoproj.io/container-runtime-executor: emissary - - name: pns - selector: - matchLabels: - workflows.argoproj.io/container-runtime-executor: pns executor: | resources: requests: cpu: 10m memory: 64Mi images: | - argoproj/argosay:v1: - command: [cowsay] - argoproj/argosay:v2: - command: [/argosay] docker/whalesay:latest: - command: [cowsay] - python:alpine3.6: - command: [python3] + cmd: [cowsay] links: | - name: Workflow Link scope: workflow @@ -1244,6 +1461,101 @@ stringData: --- apiVersion: v1 kind: Secret +metadata: + labels: + app: httpbin + name: my-httpbin-cred +stringData: + cert.pem: | + -----BEGIN CERTIFICATE----- + MIIEmjCCAoICCQDQejieQSZTxzANBgkqhkiG9w0BAQsFADAPMQ0wCwYDVQQDDAR0 + ZXN0MB4XDTIyMDQyNTEzNDc0MloXDTMyMDQyMjEzNDc0MlowDzENMAsGA1UEAwwE + dGVzdDCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMHT/tfskuXizar1 + 5DDrSkaT1cuCdQhEO7b6haxfvfMJPY9sxaxR570bw5TWQzA0xdAeUzSCbRsvxw6b + fEyLD4NajdXtcKocYUUcLclzjgyogTDPqlzAfDVZD25ySOTZ150pQaBuIi6TgnqH + WdJEh9w5//5VZmKyMx49JZMW7ADb9qYxkKVPIan3aNEXOO4SxyjsSekUFefkZOld + /RVZ8nO8hnDQ7r5NXsIIWVh35A94CA8y6QpKL2qiEFW1fofRcr/Fe/Y/5ohBQ1Ur + 
NMcX87zm9kXX1y6wbp3wn5f1PUa1sCUPlxChmRmPPmr4yIqq0a8C1d71jOIbhkox + 7A30HsP1D3rdxU6eb7KBYb7kShZge1batHRogRe5uX6hGO8iHBV/GdDE6jszoGPU + ejhfwblr6AeR6ImrWmrJ4rAx/jNqcHPuktnMRlLsBzdhqRwelwgnN13O5ZYiEJg4 + X3YYp678kHnc58aOkhG2nM32cIGha4tkoGM/GpDnFAd0P0gyJVwKo2A2Wc4cMlzQ + 7dokXbkkzK6lrHJnJjiOfzjD5yMB1Q1zQXKGHB2hJSWAMTjJ9f6qQd3ZaarYPTLx + vc4WTu+547Sx81Vlnes2xTSgt6pyFSBppHpS7KkOxb+wRF2oIpgLA3mQmsq2c60+ + G8/ro91YAYN+cl+v7m1DyEpD9TW/AgMBAAEwDQYJKoZIhvcNAQELBQADggIBACO7 + 2hU2BSGU66FwpIOihgcaADH0SwokjrEJVXlnMv26JzG/Ja63gTNE5OyghufsJtUi + E7E1gOH+dH6lVOIEmQdgGZazGxye20diLlicBATa5W2IuaBzb8Bq7ap75jOB7/sH + Yh+ZV9w0CWgV7KgzJQsp6KPfpMUXn9aJkRkLlCToCj60tC1agw5wzQcokDhOMJaY + 49FFVoKtVYwN6DfXL5Qi4GUmg7NwMUQAOGD6BQ8VLdbSJoWSHvgR2z5SDIubpdyy + XDe2V6lusdka8jdRsFH+TUKyGubs3c5YVq80A8itavxPXBUM/OJCHhUA1VpL3rvz + VgANVV7XFn5fN5TdTOrgJa2LBjflYBC3KiLf1jiW68ZT2rLDrC0yVdHFY0UJG/du + kWWQpZTfdpGbZOl1rQcYQ3BREWkr5kAv8Sh3sPliibVRvyFzwAqpEUDbpCz/Z3kZ + mRPU1Ukz8gjr5FBwzNn4x/l+80kgM22qXLMgxf7cqSLxH+dylmIieLGU0s1k7BqK + Dw77DP1QZe4G6WwrdGooxSYSBn4joKV4TI9sbyd34HJQnkMch0ugz9dlpZyT1P8Y + 3xU8Qj1BIF8yoyRuzbOokd9cEjNC6N+Z4g5lLEKYM/j1f0r3tGEoZAu2p39UGLa8 + aszMnFjeymK5OCkMUhg/KNr4WK58pc/3uFMhy8bn + -----END CERTIFICATE----- + clientID: admin + clientSecret: password + key.pem: | + -----BEGIN PRIVATE KEY----- + MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQDB0/7X7JLl4s2q + 9eQw60pGk9XLgnUIRDu2+oWsX73zCT2PbMWsUee9G8OU1kMwNMXQHlM0gm0bL8cO + m3xMiw+DWo3V7XCqHGFFHC3Jc44MqIEwz6pcwHw1WQ9uckjk2dedKUGgbiIuk4J6 + h1nSRIfcOf/+VWZisjMePSWTFuwA2/amMZClTyGp92jRFzjuEsco7EnpFBXn5GTp + Xf0VWfJzvIZw0O6+TV7CCFlYd+QPeAgPMukKSi9qohBVtX6H0XK/xXv2P+aIQUNV + KzTHF/O85vZF19cusG6d8J+X9T1GtbAlD5cQoZkZjz5q+MiKqtGvAtXe9YziG4ZK + MewN9B7D9Q963cVOnm+ygWG+5EoWYHtW2rR0aIEXubl+oRjvIhwVfxnQxOo7M6Bj + 1Ho4X8G5a+gHkeiJq1pqyeKwMf4zanBz7pLZzEZS7Ac3YakcHpcIJzddzuWWIhCY + OF92GKeu/JB53OfGjpIRtpzN9nCBoWuLZKBjPxqQ5xQHdD9IMiVcCqNgNlnOHDJc + 0O3aJF25JMyupaxyZyY4jn84w+cjAdUNc0FyhhwdoSUlgDE4yfX+qkHd2Wmq2D0y + 
8b3OFk7vueO0sfNVZZ3rNsU0oLeqchUgaaR6UuypDsW/sERdqCKYCwN5kJrKtnOt + PhvP66PdWAGDfnJfr+5tQ8hKQ/U1vwIDAQABAoICAQCL2aAIv4MGJ2zpq10oBryi + y8v4eHpkqobDcWK9ip8NGl+2em7t9HLWOZAWdboosAsCLL8wJeL/OKvRWFKJD9Tz + m4S3FAi0VKHCMaC/t4aIj5QXWd676Y41F7tQn1kE9kDh/oCBdrVnEbuVGM+wLQ4x + 0g9ovMmQ8K59ZPUVefZycEM4io6pF71cW0zfgHftHtNgLYzuhTWBCYPd9ZjDrRCI + fUArajS4Ti7OpSOB948vshVukfcfG4O21pQeo0NWT8MRpzXX6Sc2rJAehXwhIqEU + bTjIEAIMh/RoNNOR2rqJqFIdi3Ad6dsDXB1XJYXct39vXQZfRqCOC/oK0pZVQwxm + aMbb6VzMjE/paHcBLKorvSIEpuAkgesUkqJeMPxhVnVG6Tg5Xl0WM0pCh/mfir6i + gFGz/xXb0h8pj9Ksk6QpTOTqDf9JAHCuhp9hnuUR+wpnfKyOfOoDXfAyKjHR0bXz + XF9DhycErHDY4CWlhFiu8+qzrtR/sZ/AIp2MfjOzBZYoq7Zj2Z3yXDsvr5fpXUW8 + EU+ClnE/dgRBj5z1sKEQd471+R7PU3Q5apw3YlQZClsaaciTIeWOMOwBjxm9PbZL + CX9BzYaobVAy19ib+/7JgqNxsZ/3gL2xBQU1JoKeY2GnAyyyr8arLZaFR/CUGYyV + SWOdWwLxgThXIJofA3c5QQKCAQEA701sUqcRN8W/m6nCcav8I2EMU/4h18J3bk88 + NbK8vCsDvvFl/2EcjU/6iKcuxhNg1CjHD96H42FeXVXApkdnf2pF24nJHW8M18yH + uwPNzIDnLn9LSN6nJsgyo5LuVCXhf2C4UImv9P3Ae1meI/ApBJsad/bAY8MMHwtS + G/ph/yzhbAb2xF4oJwgOXBm0G2c9sfA0OlHSvYM/kvsQE6770BQ5S1ltrfIv++4J + qydiJ0Hq0RFM4aHCCi02cWp+43ALhh3EAPHN3ANpmV1IQKqyAeRFX1sqQuqpryQs + wHQxdF9FLCXHwaF8JOwheu9MTclUZdrkIRf2xac2qdFIszxCkQKCAQEAz1pHtm+f + HYJdOT3XKKgri4/OPZ7nzb1NcT34NbOPKmhRi38ZxutKrFTy+DCtpjUwF4YlE7oa + r13upHaxgxHMa9cpLN+69hmT2Pg2oMBvfEIkAiDKt4dcFQBDDKw98lpXNIStsCDp + nRcToI2TO1AMJNWCulAONov9vGggjS7mxt76cQ2QZH4k6W4yYDcC6g311vR+obA9 + MwJxZfuESw1CLzvE8Ua0esQnXQzpwECC05Q6oObeJ/44huQF7R2MP5iEmDLkgYjj + G5cmHAdD3u0Ceol3zFqF0YDxcfuglMvpmdBpjNj2rl093ufziy84iVTXJ50CRceS + e17et+3kKNF7TwKCAQBJpEHZjaA20ZwNg0hbQtns6Uip8GLpyuaGA8w7mi2KmpIk + iJUi6fenZR1sQEacngoGQCZCM/ENgEFR57nJcd/fzgyBav2BGVOSdVavrpP+gwyh + unqoihxWSvWKcQT20FF8qX8PCdAkTJKXYxTPanC1AiY7FKxQBw4L36f9BCh0JpOY + cuwtsewZVtlUbnSGmlbaE1l/OP7pYyKFUM25wPetKQwYrAScqxMpLC+9g/koq5hf + jjtilCzqhM9kR6mUxD5Hn5FZ2Q/IzSQKFjLN87mj62ON3Lg8r4pYY4GCGD+/2DGp + TFcUt2VE14XWFx4cMgDO93WM2ZsPaE3iJI2C2uCBAoIBADGmr5da4SICzmnfif7d + 
ThgMJlmRDHayhrHAIghR581Cz4v0smp0exwK92dA2MP85ngrkgNIRA2ME5HkLhtx + jp6gFeb959n4Q/Pnc8VIbym0+MRdr80Ep6MLvgJx2B+JTGpx/tk2+Fm6ZePDIudI + ArBrQ/NzKgQbv3V3BZxpB6/FQvkBQ3sczZ/r2Do70gHTt/Nx9kSnW/Az/I1sDcCe + +yMuT7lqsdrXz4kzh2GW0Pzy+JsAzV+MO2LphRXDRosP7Wg4f4kZCzDXH7QEdVcT + L83BzyLq5jJFiws9MrWOonBHfI7SgTc9coxGxIWmmAYif6anrRyibkwGapRmbYTs + rHcCggEATsKrZHJkZIfxVdw1uELZxDssxtSd3KS09xN2aypGPdSvWg2Di3NbQsNt + 4xSljnjWsYLZpxKYv3dUOOJIiIFGxVCPNCF1vL3ofqrBelXF3AAICa+ktzPQqBDI + eGPj1/h/HodY2pVHVyhZmFFsFy8We/wD64QRx4xI0w9xFAt0qmKVMoCsJmdrXGcO + kYqZnhkq3OyCswrk78OvFcB2Wnk7SWH2tYhBhOqFv8uPojaiRLOb/6xZaZOA9TPi + 0mpJScl+pVxs1UGShVH74lIvhPaPq0AHgK1y1yYphKc1A07l2z0+S1tSYOvdQY8k + NuJLvtwCMGDCxhdYm7OrJ0aUfZzP6w== + -----END PRIVATE KEY----- + pass: password + tokenURL: http://httpbin:9100/response-headers?access_token=faketoken&token_type=Bearer + user: admin +type: Opaque +--- +apiVersion: v1 +kind: Secret metadata: labels: app: minio @@ -1269,30 +1581,35 @@ apiVersion: v1 kind: Service metadata: labels: - app: minio - name: minio + app: httpbin + name: httpbin spec: ports: - - port: 9000 + - name: api + port: 9100 protocol: TCP - targetPort: 9000 + targetPort: 80 selector: - app: minio + app: httpbin --- apiVersion: v1 kind: Service metadata: labels: - app: workflow-controller - name: workflow-controller-metrics + app: minio + name: minio spec: ports: - - name: metrics - port: 9090 + - name: api + port: 9000 protocol: TCP - targetPort: 9090 + targetPort: 9000 + - name: dashboard + port: 9001 + protocol: TCP + targetPort: 9001 selector: - app: workflow-controller + app: minio --- apiVersion: scheduling.k8s.io/v1 kind: PriorityClass @@ -1335,9 +1652,12 @@ spec: initialDelaySeconds: 10 periodSeconds: 20 securityContext: + allowPrivilegeEscalation: false capabilities: drop: - ALL + readOnlyRootFilesystem: true + runAsNonRoot: true volumeMounts: - mountPath: /tmp name: tmp @@ -1352,6 +1672,40 @@ spec: --- apiVersion: apps/v1 kind: Deployment +metadata: + labels: + app: httpbin + name: 
httpbin +spec: + selector: + matchLabels: + app: httpbin + template: + metadata: + labels: + app: httpbin + spec: + containers: + - image: kennethreitz/httpbin:latest + livenessProbe: + httpGet: + path: /get + port: 80 + initialDelaySeconds: 5 + periodSeconds: 10 + name: main + ports: + - containerPort: 80 + name: api + readinessProbe: + httpGet: + path: /get + port: 80 + initialDelaySeconds: 5 + periodSeconds: 10 +--- +apiVersion: apps/v1 +kind: Deployment metadata: labels: app: minio @@ -1369,6 +1723,8 @@ spec: - command: - minio - server + - --console-address + - :9001 - /data env: - name: MINIO_ACCESS_KEY @@ -1392,6 +1748,9 @@ spec: name: main ports: - containerPort: 9000 + name: api + - containerPort: 9001 + name: dashboard readinessProbe: httpGet: path: /minio/health/ready diff --git a/manifests/quick-start-mysql.yaml b/manifests/quick-start-mysql.yaml index c3cba8694edd..37f7d52299b1 100644 --- a/manifests/quick-start-mysql.yaml +++ b/manifests/quick-start-mysql.yaml @@ -79,6 +79,48 @@ spec: --- apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition +metadata: + name: workflowartifactgctasks.argoproj.io +spec: + group: argoproj.io + names: + kind: WorkflowArtifactGCTask + listKind: WorkflowArtifactGCTaskList + plural: workflowartifactgctasks + shortNames: + - wfat + singular: workflowartifactgctask + scope: Namespaced + versions: + - name: v1alpha1 + schema: + openAPIV3Schema: + properties: + apiVersion: + type: string + kind: + type: string + metadata: + type: object + spec: + type: object + x-kubernetes-map-type: atomic + x-kubernetes-preserve-unknown-fields: true + status: + type: object + x-kubernetes-map-type: atomic + x-kubernetes-preserve-unknown-fields: true + required: + - metadata + - spec + type: object + served: true + storage: true + subresources: + status: {} +--- +apiVersion: apiextensions.k8s.io/v1 +kind: CustomResourceDefinition metadata: name: workfloweventbindings.argoproj.io spec: @@ -138,6 +180,11 @@ spec: jsonPath: 
.status.startedAt name: Age type: date + - description: Human readable message indicating details about why the workflow + is in this condition. + jsonPath: .status.message + name: Message + type: string name: v1alpha1 schema: openAPIV3Schema: @@ -209,6 +256,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -238,6 +308,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -264,6 +362,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -290,6 +390,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -373,6 +475,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + 
clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -758,6 +964,7 @@ rules: - workflows/finalizers - workflowtasksets - workflowtasksets/finalizers + - workflowartifactgctasks verbs: - get - list @@ -775,13 +982,6 @@ rules: - get - list - watch -- apiGroups: - - "" - resources: - - serviceaccounts - verbs: - - get - - list - apiGroups: - argoproj.io resources: @@ -851,8 +1051,6 @@ rules: verbs: - get - create - - list - - watch - apiGroups: - "" resources: @@ -901,6 +1099,28 @@ rules: --- apiVersion: rbac.authorization.k8s.io/v1 kind: Role +metadata: + annotations: + workflows.argoproj.io/description: | + This is the minimum recommended permissions needed if you want to use artifact GC. 
+ name: artifactgc +rules: +- apiGroups: + - argoproj.io + resources: + - workflowartifactgctasks + verbs: + - list + - watch +- apiGroups: + - argoproj.io + resources: + - workflowartifactgctasks/status + verbs: + - patch +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role metadata: annotations: workflows.argoproj.io/description: | @@ -1045,6 +1265,18 @@ subjects: --- apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding +metadata: + name: artifactgc-default +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: artifactgc +subjects: +- kind: ServiceAccount + name: default +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding metadata: name: executor-default roleRef: @@ -1164,29 +1396,14 @@ data: secretKeySecret: name: my-minio-cred key: secretkey - containerRuntimeExecutors: | - - name: emissary - selector: - matchLabels: - workflows.argoproj.io/container-runtime-executor: emissary - - name: pns - selector: - matchLabels: - workflows.argoproj.io/container-runtime-executor: pns executor: | resources: requests: cpu: 10m memory: 64Mi images: | - argoproj/argosay:v1: - command: [cowsay] - argoproj/argosay:v2: - command: [/argosay] docker/whalesay:latest: - command: [cowsay] - python:alpine3.6: - command: [python3] + cmd: [cowsay] links: | - name: Workflow Link scope: workflow @@ -1274,6 +1491,101 @@ stringData: --- apiVersion: v1 kind: Secret +metadata: + labels: + app: httpbin + name: my-httpbin-cred +stringData: + cert.pem: | + -----BEGIN CERTIFICATE----- + MIIEmjCCAoICCQDQejieQSZTxzANBgkqhkiG9w0BAQsFADAPMQ0wCwYDVQQDDAR0 + ZXN0MB4XDTIyMDQyNTEzNDc0MloXDTMyMDQyMjEzNDc0MlowDzENMAsGA1UEAwwE + dGVzdDCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMHT/tfskuXizar1 + 5DDrSkaT1cuCdQhEO7b6haxfvfMJPY9sxaxR570bw5TWQzA0xdAeUzSCbRsvxw6b + fEyLD4NajdXtcKocYUUcLclzjgyogTDPqlzAfDVZD25ySOTZ150pQaBuIi6TgnqH + WdJEh9w5//5VZmKyMx49JZMW7ADb9qYxkKVPIan3aNEXOO4SxyjsSekUFefkZOld + /RVZ8nO8hnDQ7r5NXsIIWVh35A94CA8y6QpKL2qiEFW1fofRcr/Fe/Y/5ohBQ1Ur + 
NMcX87zm9kXX1y6wbp3wn5f1PUa1sCUPlxChmRmPPmr4yIqq0a8C1d71jOIbhkox + 7A30HsP1D3rdxU6eb7KBYb7kShZge1batHRogRe5uX6hGO8iHBV/GdDE6jszoGPU + ejhfwblr6AeR6ImrWmrJ4rAx/jNqcHPuktnMRlLsBzdhqRwelwgnN13O5ZYiEJg4 + X3YYp678kHnc58aOkhG2nM32cIGha4tkoGM/GpDnFAd0P0gyJVwKo2A2Wc4cMlzQ + 7dokXbkkzK6lrHJnJjiOfzjD5yMB1Q1zQXKGHB2hJSWAMTjJ9f6qQd3ZaarYPTLx + vc4WTu+547Sx81Vlnes2xTSgt6pyFSBppHpS7KkOxb+wRF2oIpgLA3mQmsq2c60+ + G8/ro91YAYN+cl+v7m1DyEpD9TW/AgMBAAEwDQYJKoZIhvcNAQELBQADggIBACO7 + 2hU2BSGU66FwpIOihgcaADH0SwokjrEJVXlnMv26JzG/Ja63gTNE5OyghufsJtUi + E7E1gOH+dH6lVOIEmQdgGZazGxye20diLlicBATa5W2IuaBzb8Bq7ap75jOB7/sH + Yh+ZV9w0CWgV7KgzJQsp6KPfpMUXn9aJkRkLlCToCj60tC1agw5wzQcokDhOMJaY + 49FFVoKtVYwN6DfXL5Qi4GUmg7NwMUQAOGD6BQ8VLdbSJoWSHvgR2z5SDIubpdyy + XDe2V6lusdka8jdRsFH+TUKyGubs3c5YVq80A8itavxPXBUM/OJCHhUA1VpL3rvz + VgANVV7XFn5fN5TdTOrgJa2LBjflYBC3KiLf1jiW68ZT2rLDrC0yVdHFY0UJG/du + kWWQpZTfdpGbZOl1rQcYQ3BREWkr5kAv8Sh3sPliibVRvyFzwAqpEUDbpCz/Z3kZ + mRPU1Ukz8gjr5FBwzNn4x/l+80kgM22qXLMgxf7cqSLxH+dylmIieLGU0s1k7BqK + Dw77DP1QZe4G6WwrdGooxSYSBn4joKV4TI9sbyd34HJQnkMch0ugz9dlpZyT1P8Y + 3xU8Qj1BIF8yoyRuzbOokd9cEjNC6N+Z4g5lLEKYM/j1f0r3tGEoZAu2p39UGLa8 + aszMnFjeymK5OCkMUhg/KNr4WK58pc/3uFMhy8bn + -----END CERTIFICATE----- + clientID: admin + clientSecret: password + key.pem: | + -----BEGIN PRIVATE KEY----- + MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQDB0/7X7JLl4s2q + 9eQw60pGk9XLgnUIRDu2+oWsX73zCT2PbMWsUee9G8OU1kMwNMXQHlM0gm0bL8cO + m3xMiw+DWo3V7XCqHGFFHC3Jc44MqIEwz6pcwHw1WQ9uckjk2dedKUGgbiIuk4J6 + h1nSRIfcOf/+VWZisjMePSWTFuwA2/amMZClTyGp92jRFzjuEsco7EnpFBXn5GTp + Xf0VWfJzvIZw0O6+TV7CCFlYd+QPeAgPMukKSi9qohBVtX6H0XK/xXv2P+aIQUNV + KzTHF/O85vZF19cusG6d8J+X9T1GtbAlD5cQoZkZjz5q+MiKqtGvAtXe9YziG4ZK + MewN9B7D9Q963cVOnm+ygWG+5EoWYHtW2rR0aIEXubl+oRjvIhwVfxnQxOo7M6Bj + 1Ho4X8G5a+gHkeiJq1pqyeKwMf4zanBz7pLZzEZS7Ac3YakcHpcIJzddzuWWIhCY + OF92GKeu/JB53OfGjpIRtpzN9nCBoWuLZKBjPxqQ5xQHdD9IMiVcCqNgNlnOHDJc + 0O3aJF25JMyupaxyZyY4jn84w+cjAdUNc0FyhhwdoSUlgDE4yfX+qkHd2Wmq2D0y + 
8b3OFk7vueO0sfNVZZ3rNsU0oLeqchUgaaR6UuypDsW/sERdqCKYCwN5kJrKtnOt + PhvP66PdWAGDfnJfr+5tQ8hKQ/U1vwIDAQABAoICAQCL2aAIv4MGJ2zpq10oBryi + y8v4eHpkqobDcWK9ip8NGl+2em7t9HLWOZAWdboosAsCLL8wJeL/OKvRWFKJD9Tz + m4S3FAi0VKHCMaC/t4aIj5QXWd676Y41F7tQn1kE9kDh/oCBdrVnEbuVGM+wLQ4x + 0g9ovMmQ8K59ZPUVefZycEM4io6pF71cW0zfgHftHtNgLYzuhTWBCYPd9ZjDrRCI + fUArajS4Ti7OpSOB948vshVukfcfG4O21pQeo0NWT8MRpzXX6Sc2rJAehXwhIqEU + bTjIEAIMh/RoNNOR2rqJqFIdi3Ad6dsDXB1XJYXct39vXQZfRqCOC/oK0pZVQwxm + aMbb6VzMjE/paHcBLKorvSIEpuAkgesUkqJeMPxhVnVG6Tg5Xl0WM0pCh/mfir6i + gFGz/xXb0h8pj9Ksk6QpTOTqDf9JAHCuhp9hnuUR+wpnfKyOfOoDXfAyKjHR0bXz + XF9DhycErHDY4CWlhFiu8+qzrtR/sZ/AIp2MfjOzBZYoq7Zj2Z3yXDsvr5fpXUW8 + EU+ClnE/dgRBj5z1sKEQd471+R7PU3Q5apw3YlQZClsaaciTIeWOMOwBjxm9PbZL + CX9BzYaobVAy19ib+/7JgqNxsZ/3gL2xBQU1JoKeY2GnAyyyr8arLZaFR/CUGYyV + SWOdWwLxgThXIJofA3c5QQKCAQEA701sUqcRN8W/m6nCcav8I2EMU/4h18J3bk88 + NbK8vCsDvvFl/2EcjU/6iKcuxhNg1CjHD96H42FeXVXApkdnf2pF24nJHW8M18yH + uwPNzIDnLn9LSN6nJsgyo5LuVCXhf2C4UImv9P3Ae1meI/ApBJsad/bAY8MMHwtS + G/ph/yzhbAb2xF4oJwgOXBm0G2c9sfA0OlHSvYM/kvsQE6770BQ5S1ltrfIv++4J + qydiJ0Hq0RFM4aHCCi02cWp+43ALhh3EAPHN3ANpmV1IQKqyAeRFX1sqQuqpryQs + wHQxdF9FLCXHwaF8JOwheu9MTclUZdrkIRf2xac2qdFIszxCkQKCAQEAz1pHtm+f + HYJdOT3XKKgri4/OPZ7nzb1NcT34NbOPKmhRi38ZxutKrFTy+DCtpjUwF4YlE7oa + r13upHaxgxHMa9cpLN+69hmT2Pg2oMBvfEIkAiDKt4dcFQBDDKw98lpXNIStsCDp + nRcToI2TO1AMJNWCulAONov9vGggjS7mxt76cQ2QZH4k6W4yYDcC6g311vR+obA9 + MwJxZfuESw1CLzvE8Ua0esQnXQzpwECC05Q6oObeJ/44huQF7R2MP5iEmDLkgYjj + G5cmHAdD3u0Ceol3zFqF0YDxcfuglMvpmdBpjNj2rl093ufziy84iVTXJ50CRceS + e17et+3kKNF7TwKCAQBJpEHZjaA20ZwNg0hbQtns6Uip8GLpyuaGA8w7mi2KmpIk + iJUi6fenZR1sQEacngoGQCZCM/ENgEFR57nJcd/fzgyBav2BGVOSdVavrpP+gwyh + unqoihxWSvWKcQT20FF8qX8PCdAkTJKXYxTPanC1AiY7FKxQBw4L36f9BCh0JpOY + cuwtsewZVtlUbnSGmlbaE1l/OP7pYyKFUM25wPetKQwYrAScqxMpLC+9g/koq5hf + jjtilCzqhM9kR6mUxD5Hn5FZ2Q/IzSQKFjLN87mj62ON3Lg8r4pYY4GCGD+/2DGp + TFcUt2VE14XWFx4cMgDO93WM2ZsPaE3iJI2C2uCBAoIBADGmr5da4SICzmnfif7d + 
ThgMJlmRDHayhrHAIghR581Cz4v0smp0exwK92dA2MP85ngrkgNIRA2ME5HkLhtx + jp6gFeb959n4Q/Pnc8VIbym0+MRdr80Ep6MLvgJx2B+JTGpx/tk2+Fm6ZePDIudI + ArBrQ/NzKgQbv3V3BZxpB6/FQvkBQ3sczZ/r2Do70gHTt/Nx9kSnW/Az/I1sDcCe + +yMuT7lqsdrXz4kzh2GW0Pzy+JsAzV+MO2LphRXDRosP7Wg4f4kZCzDXH7QEdVcT + L83BzyLq5jJFiws9MrWOonBHfI7SgTc9coxGxIWmmAYif6anrRyibkwGapRmbYTs + rHcCggEATsKrZHJkZIfxVdw1uELZxDssxtSd3KS09xN2aypGPdSvWg2Di3NbQsNt + 4xSljnjWsYLZpxKYv3dUOOJIiIFGxVCPNCF1vL3ofqrBelXF3AAICa+ktzPQqBDI + eGPj1/h/HodY2pVHVyhZmFFsFy8We/wD64QRx4xI0w9xFAt0qmKVMoCsJmdrXGcO + kYqZnhkq3OyCswrk78OvFcB2Wnk7SWH2tYhBhOqFv8uPojaiRLOb/6xZaZOA9TPi + 0mpJScl+pVxs1UGShVH74lIvhPaPq0AHgK1y1yYphKc1A07l2z0+S1tSYOvdQY8k + NuJLvtwCMGDCxhdYm7OrJ0aUfZzP6w== + -----END PRIVATE KEY----- + pass: password + tokenURL: http://httpbin:9100/response-headers?access_token=faketoken&token_type=Bearer + user: admin +type: Opaque +--- +apiVersion: v1 +kind: Secret metadata: labels: app: minio @@ -1299,44 +1611,49 @@ apiVersion: v1 kind: Service metadata: labels: - app: minio - name: minio + app: httpbin + name: httpbin spec: ports: - - port: 9000 + - name: api + port: 9100 protocol: TCP - targetPort: 9000 + targetPort: 80 selector: - app: minio + app: httpbin --- apiVersion: v1 kind: Service metadata: labels: - app: mysql - name: mysql + app: minio + name: minio spec: ports: - - port: 3306 + - name: api + port: 9000 protocol: TCP - targetPort: 3306 + targetPort: 9000 + - name: dashboard + port: 9001 + protocol: TCP + targetPort: 9001 selector: - app: mysql + app: minio --- apiVersion: v1 kind: Service metadata: labels: - app: workflow-controller - name: workflow-controller-metrics + app: mysql + name: mysql spec: ports: - - name: metrics - port: 9090 + - port: 3306 protocol: TCP - targetPort: 9090 + targetPort: 3306 selector: - app: workflow-controller + app: mysql --- apiVersion: scheduling.k8s.io/v1 kind: PriorityClass @@ -1379,9 +1696,12 @@ spec: initialDelaySeconds: 10 periodSeconds: 20 securityContext: + allowPrivilegeEscalation: false 
capabilities: drop: - ALL + readOnlyRootFilesystem: true + runAsNonRoot: true volumeMounts: - mountPath: /tmp name: tmp @@ -1396,6 +1716,40 @@ spec: --- apiVersion: apps/v1 kind: Deployment +metadata: + labels: + app: httpbin + name: httpbin +spec: + selector: + matchLabels: + app: httpbin + template: + metadata: + labels: + app: httpbin + spec: + containers: + - image: kennethreitz/httpbin:latest + livenessProbe: + httpGet: + path: /get + port: 80 + initialDelaySeconds: 5 + periodSeconds: 10 + name: main + ports: + - containerPort: 80 + name: api + readinessProbe: + httpGet: + path: /get + port: 80 + initialDelaySeconds: 5 + periodSeconds: 10 +--- +apiVersion: apps/v1 +kind: Deployment metadata: labels: app: minio @@ -1413,6 +1767,8 @@ spec: - command: - minio - server + - --console-address + - :9001 - /data env: - name: MINIO_ACCESS_KEY @@ -1436,6 +1792,9 @@ spec: name: main ports: - containerPort: 9000 + name: api + - containerPort: 9001 + name: dashboard readinessProbe: httpGet: path: /minio/health/ready diff --git a/manifests/quick-start-postgres.yaml b/manifests/quick-start-postgres.yaml index 79529e2853dc..97ac6c6ff1de 100644 --- a/manifests/quick-start-postgres.yaml +++ b/manifests/quick-start-postgres.yaml @@ -79,6 +79,48 @@ spec: --- apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition +metadata: + name: workflowartifactgctasks.argoproj.io +spec: + group: argoproj.io + names: + kind: WorkflowArtifactGCTask + listKind: WorkflowArtifactGCTaskList + plural: workflowartifactgctasks + shortNames: + - wfat + singular: workflowartifactgctask + scope: Namespaced + versions: + - name: v1alpha1 + schema: + openAPIV3Schema: + properties: + apiVersion: + type: string + kind: + type: string + metadata: + type: object + spec: + type: object + x-kubernetes-map-type: atomic + x-kubernetes-preserve-unknown-fields: true + status: + type: object + x-kubernetes-map-type: atomic + x-kubernetes-preserve-unknown-fields: true + required: + - metadata + - spec + 
type: object + served: true + storage: true + subresources: + status: {} +--- +apiVersion: apiextensions.k8s.io/v1 +kind: CustomResourceDefinition metadata: name: workfloweventbindings.argoproj.io spec: @@ -138,6 +180,11 @@ spec: jsonPath: .status.startedAt name: Age type: date + - description: Human readable message indicating details about why the workflow + is in this condition. + jsonPath: .status.message + name: Message + type: string name: v1alpha1 schema: openAPIV3Schema: @@ -209,6 +256,29 @@ spec: type: object archiveLogs: type: boolean + artifactGC: + properties: + podMetadata: + properties: + annotations: + additionalProperties: + type: string + type: object + labels: + additionalProperties: + type: string + type: object + type: object + serviceAccountName: + type: string + strategy: + enum: + - "" + - OnWorkflowCompletion + - OnWorkflowDeletion + - Never + type: string + type: object artifactory: properties: passwordSecret: @@ -238,6 +308,34 @@ spec: required: - url type: object + azure: + properties: + accountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + blob: + type: string + container: + type: string + endpoint: + type: string + useSDKCreds: + type: boolean + required: + - blob + - container + - endpoint + type: object + deleted: + type: boolean from: type: string fromExpression: @@ -264,6 +362,8 @@ spec: type: object git: properties: + branch: + type: string depth: format: int64 type: integer @@ -290,6 +390,8 @@ spec: type: string revision: type: string + singleBranch: + type: boolean sshPrivateKeySecret: properties: key: @@ -373,6 +475,110 @@ spec: type: object http: properties: + auth: + properties: + basicAuth: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: 
boolean + required: + - key + type: object + type: object + clientCert: + properties: + clientCertSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + oauth2: + properties: + clientIDSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + clientSecretSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + endpointParams: + items: + properties: + key: + type: string + value: + type: string + required: + - key + type: object + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + type: object headers: items: properties: @@ -758,6 +964,7 @@ rules: - workflows/finalizers - workflowtasksets - workflowtasksets/finalizers + - workflowartifactgctasks verbs: - get - list @@ -775,13 +982,6 @@ rules: - get - list - watch -- apiGroups: - - "" - resources: - - serviceaccounts - verbs: - - get - - list - apiGroups: - argoproj.io resources: @@ -851,8 +1051,6 @@ rules: verbs: - get - create - - list - - watch - apiGroups: - "" resources: @@ -901,6 +1099,28 @@ rules: --- apiVersion: rbac.authorization.k8s.io/v1 kind: Role +metadata: + annotations: + workflows.argoproj.io/description: | + This is the minimum recommended permissions needed if you want to use artifact GC. 
+ name: artifactgc +rules: +- apiGroups: + - argoproj.io + resources: + - workflowartifactgctasks + verbs: + - list + - watch +- apiGroups: + - argoproj.io + resources: + - workflowartifactgctasks/status + verbs: + - patch +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role metadata: annotations: workflows.argoproj.io/description: | @@ -1045,6 +1265,18 @@ subjects: --- apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding +metadata: + name: artifactgc-default +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: artifactgc +subjects: +- kind: ServiceAccount + name: default +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding metadata: name: executor-default roleRef: @@ -1164,29 +1396,14 @@ data: secretKeySecret: name: my-minio-cred key: secretkey - containerRuntimeExecutors: | - - name: emissary - selector: - matchLabels: - workflows.argoproj.io/container-runtime-executor: emissary - - name: pns - selector: - matchLabels: - workflows.argoproj.io/container-runtime-executor: pns executor: | resources: requests: cpu: 10m memory: 64Mi images: | - argoproj/argosay:v1: - command: [cowsay] - argoproj/argosay:v2: - command: [/argosay] docker/whalesay:latest: - command: [cowsay] - python:alpine3.6: - command: [python3] + cmd: [cowsay] links: | - name: Workflow Link scope: workflow @@ -1274,6 +1491,101 @@ stringData: --- apiVersion: v1 kind: Secret +metadata: + labels: + app: httpbin + name: my-httpbin-cred +stringData: + cert.pem: | + -----BEGIN CERTIFICATE----- + MIIEmjCCAoICCQDQejieQSZTxzANBgkqhkiG9w0BAQsFADAPMQ0wCwYDVQQDDAR0 + ZXN0MB4XDTIyMDQyNTEzNDc0MloXDTMyMDQyMjEzNDc0MlowDzENMAsGA1UEAwwE + dGVzdDCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMHT/tfskuXizar1 + 5DDrSkaT1cuCdQhEO7b6haxfvfMJPY9sxaxR570bw5TWQzA0xdAeUzSCbRsvxw6b + fEyLD4NajdXtcKocYUUcLclzjgyogTDPqlzAfDVZD25ySOTZ150pQaBuIi6TgnqH + WdJEh9w5//5VZmKyMx49JZMW7ADb9qYxkKVPIan3aNEXOO4SxyjsSekUFefkZOld + /RVZ8nO8hnDQ7r5NXsIIWVh35A94CA8y6QpKL2qiEFW1fofRcr/Fe/Y/5ohBQ1Ur + 
NMcX87zm9kXX1y6wbp3wn5f1PUa1sCUPlxChmRmPPmr4yIqq0a8C1d71jOIbhkox + 7A30HsP1D3rdxU6eb7KBYb7kShZge1batHRogRe5uX6hGO8iHBV/GdDE6jszoGPU + ejhfwblr6AeR6ImrWmrJ4rAx/jNqcHPuktnMRlLsBzdhqRwelwgnN13O5ZYiEJg4 + X3YYp678kHnc58aOkhG2nM32cIGha4tkoGM/GpDnFAd0P0gyJVwKo2A2Wc4cMlzQ + 7dokXbkkzK6lrHJnJjiOfzjD5yMB1Q1zQXKGHB2hJSWAMTjJ9f6qQd3ZaarYPTLx + vc4WTu+547Sx81Vlnes2xTSgt6pyFSBppHpS7KkOxb+wRF2oIpgLA3mQmsq2c60+ + G8/ro91YAYN+cl+v7m1DyEpD9TW/AgMBAAEwDQYJKoZIhvcNAQELBQADggIBACO7 + 2hU2BSGU66FwpIOihgcaADH0SwokjrEJVXlnMv26JzG/Ja63gTNE5OyghufsJtUi + E7E1gOH+dH6lVOIEmQdgGZazGxye20diLlicBATa5W2IuaBzb8Bq7ap75jOB7/sH + Yh+ZV9w0CWgV7KgzJQsp6KPfpMUXn9aJkRkLlCToCj60tC1agw5wzQcokDhOMJaY + 49FFVoKtVYwN6DfXL5Qi4GUmg7NwMUQAOGD6BQ8VLdbSJoWSHvgR2z5SDIubpdyy + XDe2V6lusdka8jdRsFH+TUKyGubs3c5YVq80A8itavxPXBUM/OJCHhUA1VpL3rvz + VgANVV7XFn5fN5TdTOrgJa2LBjflYBC3KiLf1jiW68ZT2rLDrC0yVdHFY0UJG/du + kWWQpZTfdpGbZOl1rQcYQ3BREWkr5kAv8Sh3sPliibVRvyFzwAqpEUDbpCz/Z3kZ + mRPU1Ukz8gjr5FBwzNn4x/l+80kgM22qXLMgxf7cqSLxH+dylmIieLGU0s1k7BqK + Dw77DP1QZe4G6WwrdGooxSYSBn4joKV4TI9sbyd34HJQnkMch0ugz9dlpZyT1P8Y + 3xU8Qj1BIF8yoyRuzbOokd9cEjNC6N+Z4g5lLEKYM/j1f0r3tGEoZAu2p39UGLa8 + aszMnFjeymK5OCkMUhg/KNr4WK58pc/3uFMhy8bn + -----END CERTIFICATE----- + clientID: admin + clientSecret: password + key.pem: | + -----BEGIN PRIVATE KEY----- + MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQDB0/7X7JLl4s2q + 9eQw60pGk9XLgnUIRDu2+oWsX73zCT2PbMWsUee9G8OU1kMwNMXQHlM0gm0bL8cO + m3xMiw+DWo3V7XCqHGFFHC3Jc44MqIEwz6pcwHw1WQ9uckjk2dedKUGgbiIuk4J6 + h1nSRIfcOf/+VWZisjMePSWTFuwA2/amMZClTyGp92jRFzjuEsco7EnpFBXn5GTp + Xf0VWfJzvIZw0O6+TV7CCFlYd+QPeAgPMukKSi9qohBVtX6H0XK/xXv2P+aIQUNV + KzTHF/O85vZF19cusG6d8J+X9T1GtbAlD5cQoZkZjz5q+MiKqtGvAtXe9YziG4ZK + MewN9B7D9Q963cVOnm+ygWG+5EoWYHtW2rR0aIEXubl+oRjvIhwVfxnQxOo7M6Bj + 1Ho4X8G5a+gHkeiJq1pqyeKwMf4zanBz7pLZzEZS7Ac3YakcHpcIJzddzuWWIhCY + OF92GKeu/JB53OfGjpIRtpzN9nCBoWuLZKBjPxqQ5xQHdD9IMiVcCqNgNlnOHDJc + 0O3aJF25JMyupaxyZyY4jn84w+cjAdUNc0FyhhwdoSUlgDE4yfX+qkHd2Wmq2D0y + 
8b3OFk7vueO0sfNVZZ3rNsU0oLeqchUgaaR6UuypDsW/sERdqCKYCwN5kJrKtnOt + PhvP66PdWAGDfnJfr+5tQ8hKQ/U1vwIDAQABAoICAQCL2aAIv4MGJ2zpq10oBryi + y8v4eHpkqobDcWK9ip8NGl+2em7t9HLWOZAWdboosAsCLL8wJeL/OKvRWFKJD9Tz + m4S3FAi0VKHCMaC/t4aIj5QXWd676Y41F7tQn1kE9kDh/oCBdrVnEbuVGM+wLQ4x + 0g9ovMmQ8K59ZPUVefZycEM4io6pF71cW0zfgHftHtNgLYzuhTWBCYPd9ZjDrRCI + fUArajS4Ti7OpSOB948vshVukfcfG4O21pQeo0NWT8MRpzXX6Sc2rJAehXwhIqEU + bTjIEAIMh/RoNNOR2rqJqFIdi3Ad6dsDXB1XJYXct39vXQZfRqCOC/oK0pZVQwxm + aMbb6VzMjE/paHcBLKorvSIEpuAkgesUkqJeMPxhVnVG6Tg5Xl0WM0pCh/mfir6i + gFGz/xXb0h8pj9Ksk6QpTOTqDf9JAHCuhp9hnuUR+wpnfKyOfOoDXfAyKjHR0bXz + XF9DhycErHDY4CWlhFiu8+qzrtR/sZ/AIp2MfjOzBZYoq7Zj2Z3yXDsvr5fpXUW8 + EU+ClnE/dgRBj5z1sKEQd471+R7PU3Q5apw3YlQZClsaaciTIeWOMOwBjxm9PbZL + CX9BzYaobVAy19ib+/7JgqNxsZ/3gL2xBQU1JoKeY2GnAyyyr8arLZaFR/CUGYyV + SWOdWwLxgThXIJofA3c5QQKCAQEA701sUqcRN8W/m6nCcav8I2EMU/4h18J3bk88 + NbK8vCsDvvFl/2EcjU/6iKcuxhNg1CjHD96H42FeXVXApkdnf2pF24nJHW8M18yH + uwPNzIDnLn9LSN6nJsgyo5LuVCXhf2C4UImv9P3Ae1meI/ApBJsad/bAY8MMHwtS + G/ph/yzhbAb2xF4oJwgOXBm0G2c9sfA0OlHSvYM/kvsQE6770BQ5S1ltrfIv++4J + qydiJ0Hq0RFM4aHCCi02cWp+43ALhh3EAPHN3ANpmV1IQKqyAeRFX1sqQuqpryQs + wHQxdF9FLCXHwaF8JOwheu9MTclUZdrkIRf2xac2qdFIszxCkQKCAQEAz1pHtm+f + HYJdOT3XKKgri4/OPZ7nzb1NcT34NbOPKmhRi38ZxutKrFTy+DCtpjUwF4YlE7oa + r13upHaxgxHMa9cpLN+69hmT2Pg2oMBvfEIkAiDKt4dcFQBDDKw98lpXNIStsCDp + nRcToI2TO1AMJNWCulAONov9vGggjS7mxt76cQ2QZH4k6W4yYDcC6g311vR+obA9 + MwJxZfuESw1CLzvE8Ua0esQnXQzpwECC05Q6oObeJ/44huQF7R2MP5iEmDLkgYjj + G5cmHAdD3u0Ceol3zFqF0YDxcfuglMvpmdBpjNj2rl093ufziy84iVTXJ50CRceS + e17et+3kKNF7TwKCAQBJpEHZjaA20ZwNg0hbQtns6Uip8GLpyuaGA8w7mi2KmpIk + iJUi6fenZR1sQEacngoGQCZCM/ENgEFR57nJcd/fzgyBav2BGVOSdVavrpP+gwyh + unqoihxWSvWKcQT20FF8qX8PCdAkTJKXYxTPanC1AiY7FKxQBw4L36f9BCh0JpOY + cuwtsewZVtlUbnSGmlbaE1l/OP7pYyKFUM25wPetKQwYrAScqxMpLC+9g/koq5hf + jjtilCzqhM9kR6mUxD5Hn5FZ2Q/IzSQKFjLN87mj62ON3Lg8r4pYY4GCGD+/2DGp + TFcUt2VE14XWFx4cMgDO93WM2ZsPaE3iJI2C2uCBAoIBADGmr5da4SICzmnfif7d + 
ThgMJlmRDHayhrHAIghR581Cz4v0smp0exwK92dA2MP85ngrkgNIRA2ME5HkLhtx + jp6gFeb959n4Q/Pnc8VIbym0+MRdr80Ep6MLvgJx2B+JTGpx/tk2+Fm6ZePDIudI + ArBrQ/NzKgQbv3V3BZxpB6/FQvkBQ3sczZ/r2Do70gHTt/Nx9kSnW/Az/I1sDcCe + +yMuT7lqsdrXz4kzh2GW0Pzy+JsAzV+MO2LphRXDRosP7Wg4f4kZCzDXH7QEdVcT + L83BzyLq5jJFiws9MrWOonBHfI7SgTc9coxGxIWmmAYif6anrRyibkwGapRmbYTs + rHcCggEATsKrZHJkZIfxVdw1uELZxDssxtSd3KS09xN2aypGPdSvWg2Di3NbQsNt + 4xSljnjWsYLZpxKYv3dUOOJIiIFGxVCPNCF1vL3ofqrBelXF3AAICa+ktzPQqBDI + eGPj1/h/HodY2pVHVyhZmFFsFy8We/wD64QRx4xI0w9xFAt0qmKVMoCsJmdrXGcO + kYqZnhkq3OyCswrk78OvFcB2Wnk7SWH2tYhBhOqFv8uPojaiRLOb/6xZaZOA9TPi + 0mpJScl+pVxs1UGShVH74lIvhPaPq0AHgK1y1yYphKc1A07l2z0+S1tSYOvdQY8k + NuJLvtwCMGDCxhdYm7OrJ0aUfZzP6w== + -----END PRIVATE KEY----- + pass: password + tokenURL: http://httpbin:9100/response-headers?access_token=faketoken&token_type=Bearer + user: admin +type: Opaque +--- +apiVersion: v1 +kind: Secret metadata: labels: app: minio @@ -1299,44 +1611,49 @@ apiVersion: v1 kind: Service metadata: labels: - app: minio - name: minio + app: httpbin + name: httpbin spec: ports: - - port: 9000 + - name: api + port: 9100 protocol: TCP - targetPort: 9000 + targetPort: 80 selector: - app: minio + app: httpbin --- apiVersion: v1 kind: Service metadata: labels: - app: postgres - name: postgres + app: minio + name: minio spec: ports: - - port: 5432 + - name: api + port: 9000 protocol: TCP - targetPort: 5432 + targetPort: 9000 + - name: dashboard + port: 9001 + protocol: TCP + targetPort: 9001 selector: - app: postgres + app: minio --- apiVersion: v1 kind: Service metadata: labels: - app: workflow-controller - name: workflow-controller-metrics + app: postgres + name: postgres spec: ports: - - name: metrics - port: 9090 + - port: 5432 protocol: TCP - targetPort: 9090 + targetPort: 5432 selector: - app: workflow-controller + app: postgres --- apiVersion: scheduling.k8s.io/v1 kind: PriorityClass @@ -1379,9 +1696,12 @@ spec: initialDelaySeconds: 10 periodSeconds: 20 securityContext: + 
allowPrivilegeEscalation: false capabilities: drop: - ALL + readOnlyRootFilesystem: true + runAsNonRoot: true volumeMounts: - mountPath: /tmp name: tmp @@ -1396,6 +1716,40 @@ spec: --- apiVersion: apps/v1 kind: Deployment +metadata: + labels: + app: httpbin + name: httpbin +spec: + selector: + matchLabels: + app: httpbin + template: + metadata: + labels: + app: httpbin + spec: + containers: + - image: kennethreitz/httpbin:latest + livenessProbe: + httpGet: + path: /get + port: 80 + initialDelaySeconds: 5 + periodSeconds: 10 + name: main + ports: + - containerPort: 80 + name: api + readinessProbe: + httpGet: + path: /get + port: 80 + initialDelaySeconds: 5 + periodSeconds: 10 +--- +apiVersion: apps/v1 +kind: Deployment metadata: labels: app: minio @@ -1413,6 +1767,8 @@ spec: - command: - minio - server + - --console-address + - :9001 - /data env: - name: MINIO_ACCESS_KEY @@ -1436,6 +1792,9 @@ spec: name: main ports: - containerPort: 9000 + name: api + - containerPort: 9001 + name: dashboard readinessProbe: httpGet: path: /minio/health/ready diff --git a/manifests/quick-start/base/artifactgc-default-rolebinding.yaml b/manifests/quick-start/base/artifactgc-default-rolebinding.yaml new file mode 100644 index 000000000000..8a1b668db000 --- /dev/null +++ b/manifests/quick-start/base/artifactgc-default-rolebinding.yaml @@ -0,0 +1,11 @@ +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: artifactgc-default +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: artifactgc +subjects: + - kind: ServiceAccount + name: default diff --git a/manifests/quick-start/base/artifactgc-role.yaml b/manifests/quick-start/base/artifactgc-role.yaml new file mode 100644 index 000000000000..11fd836d9421 --- /dev/null +++ b/manifests/quick-start/base/artifactgc-role.yaml @@ -0,0 +1,22 @@ +# https://argoproj.github.io/argo-workflows/workflow-rbac/ +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: artifactgc + annotations: + 
workflows.argoproj.io/description: | + This is the minimum recommended permissions needed if you want to use artifact GC. +rules: + - apiGroups: + - argoproj.io + resources: + - workflowartifactgctasks + verbs: + - list + - watch + - apiGroups: + - argoproj.io + resources: + - workflowartifactgctasks/status + verbs: + - patch diff --git a/manifests/quick-start/base/executor/docker/executor-role.yaml b/manifests/quick-start/base/executor/docker/executor-role.yaml index 3ee284a9c0d7..fd4349b59161 100644 --- a/manifests/quick-start/base/executor/docker/executor-role.yaml +++ b/manifests/quick-start/base/executor/docker/executor-role.yaml @@ -7,6 +7,7 @@ metadata: Recommended minimum permissions for the `docker` executor. This executor is superseded by the `emmisary` executor, so we do not recommend you use it anymore. + workflows.argoproj.io/version: "< 3.4.0" rules: - apiGroups: - argoproj.io diff --git a/manifests/quick-start/base/executor/k8sapi/executor-role.yaml b/manifests/quick-start/base/executor/k8sapi/executor-role.yaml index a7c9e2ac4f04..7685a1445b74 100644 --- a/manifests/quick-start/base/executor/k8sapi/executor-role.yaml +++ b/manifests/quick-start/base/executor/k8sapi/executor-role.yaml @@ -7,6 +7,7 @@ metadata: Recommended minimum permissions for `k8siapi` executor. This executor is superseded by the `emmisary` executor, so we do not recommend you use it anymore. + workflows.argoproj.io/version: "< 3.4.0" rules: - apiGroups: - argoproj.io diff --git a/manifests/quick-start/base/executor/kubelet/executor-role.yaml b/manifests/quick-start/base/executor/kubelet/executor-role.yaml index 94ada0b5279e..341b298c3d9c 100644 --- a/manifests/quick-start/base/executor/kubelet/executor-role.yaml +++ b/manifests/quick-start/base/executor/kubelet/executor-role.yaml @@ -7,6 +7,7 @@ metadata: Recommended minimum permissions for `kubelet` executor. This executor is superseded by the `emmisary` executor, so we do not recommend you use it anymore. 
+ workflows.argoproj.io/version: "< 3.4.0" rules: - apiGroups: - argoproj.io diff --git a/manifests/quick-start/base/executor/kubelet/kubelet-executor-clusterrole.yaml b/manifests/quick-start/base/executor/kubelet/kubelet-executor-clusterrole.yaml index 6f79bc1ac337..914730194696 100644 --- a/manifests/quick-start/base/executor/kubelet/kubelet-executor-clusterrole.yaml +++ b/manifests/quick-start/base/executor/kubelet/kubelet-executor-clusterrole.yaml @@ -2,6 +2,8 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: name: kubelet-executor + annotations: + workflows.argoproj.io/version: "< 3.4.0" rules: # This allows the kubelet executor. - apiGroups: diff --git a/manifests/quick-start/base/executor/kubelet/kubelet-executor-default-clusterrolebinding.yaml b/manifests/quick-start/base/executor/kubelet/kubelet-executor-default-clusterrolebinding.yaml index f0aff8e6c480..e754dab2487c 100644 --- a/manifests/quick-start/base/executor/kubelet/kubelet-executor-default-clusterrolebinding.yaml +++ b/manifests/quick-start/base/executor/kubelet/kubelet-executor-default-clusterrolebinding.yaml @@ -2,6 +2,8 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: name: kubelet-executor-default + annotations: + workflows.argoproj.io/version: "< 3.4.0" roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole diff --git a/manifests/quick-start/base/executor/pns/executor-role.yaml b/manifests/quick-start/base/executor/pns/executor-role.yaml index 27d506bc5b00..ea5d7a4d1e13 100644 --- a/manifests/quick-start/base/executor/pns/executor-role.yaml +++ b/manifests/quick-start/base/executor/pns/executor-role.yaml @@ -5,6 +5,7 @@ metadata: annotations: workflows.argoproj.io/description: | Recomended minimum permissions for `pns` executor. 
+ workflows.argoproj.io/version: "< 3.4.0" rules: - apiGroups: - argoproj.io diff --git a/manifests/quick-start/base/httpbin/httpbin-deploy.yaml b/manifests/quick-start/base/httpbin/httpbin-deploy.yaml new file mode 100644 index 000000000000..46054b652cd1 --- /dev/null +++ b/manifests/quick-start/base/httpbin/httpbin-deploy.yaml @@ -0,0 +1,33 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: httpbin + labels: + app: httpbin +spec: + selector: + matchLabels: + app: httpbin + template: + metadata: + labels: + app: httpbin + spec: + containers: + - name: main + image: kennethreitz/httpbin:latest + ports: + - containerPort: 80 + name: api + readinessProbe: + httpGet: + path: /get + port: 80 + initialDelaySeconds: 5 + periodSeconds: 10 + livenessProbe: + httpGet: + path: /get + port: 80 + initialDelaySeconds: 5 + periodSeconds: 10 diff --git a/manifests/quick-start/base/httpbin/httpbin-service.yaml b/manifests/quick-start/base/httpbin/httpbin-service.yaml new file mode 100644 index 000000000000..f6aa3d11521c --- /dev/null +++ b/manifests/quick-start/base/httpbin/httpbin-service.yaml @@ -0,0 +1,14 @@ +apiVersion: v1 +kind: Service +metadata: + name: httpbin + labels: + app: httpbin +spec: + selector: + app: httpbin + ports: + - port: 9100 + name: api + protocol: TCP + targetPort: 80 diff --git a/manifests/quick-start/base/httpbin/kustomization.yaml b/manifests/quick-start/base/httpbin/kustomization.yaml new file mode 100644 index 000000000000..61f9aaf57d4e --- /dev/null +++ b/manifests/quick-start/base/httpbin/kustomization.yaml @@ -0,0 +1,7 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: + - httpbin-deploy.yaml + - httpbin-service.yaml + - my-httpbin-cred-secret.yaml diff --git a/manifests/quick-start/base/httpbin/my-httpbin-cred-secret.yaml b/manifests/quick-start/base/httpbin/my-httpbin-cred-secret.yaml new file mode 100644 index 000000000000..8e2d23f4805c --- /dev/null +++ 
b/manifests/quick-start/base/httpbin/my-httpbin-cred-secret.yaml @@ -0,0 +1,97 @@ +apiVersion: v1 +stringData: + # for basic auth authentication + user: admin + pass: password + # for oauth2 authentication + clientID: admin + clientSecret: password + tokenURL: "http://httpbin:9100/response-headers?access_token=faketoken&token_type=Bearer" # this URL will return a body with the "access_token" field set and can simulate an oauth token flow + # for client cert authentication + cert.pem: | + -----BEGIN CERTIFICATE----- + MIIEmjCCAoICCQDQejieQSZTxzANBgkqhkiG9w0BAQsFADAPMQ0wCwYDVQQDDAR0 + ZXN0MB4XDTIyMDQyNTEzNDc0MloXDTMyMDQyMjEzNDc0MlowDzENMAsGA1UEAwwE + dGVzdDCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMHT/tfskuXizar1 + 5DDrSkaT1cuCdQhEO7b6haxfvfMJPY9sxaxR570bw5TWQzA0xdAeUzSCbRsvxw6b + fEyLD4NajdXtcKocYUUcLclzjgyogTDPqlzAfDVZD25ySOTZ150pQaBuIi6TgnqH + WdJEh9w5//5VZmKyMx49JZMW7ADb9qYxkKVPIan3aNEXOO4SxyjsSekUFefkZOld + /RVZ8nO8hnDQ7r5NXsIIWVh35A94CA8y6QpKL2qiEFW1fofRcr/Fe/Y/5ohBQ1Ur + NMcX87zm9kXX1y6wbp3wn5f1PUa1sCUPlxChmRmPPmr4yIqq0a8C1d71jOIbhkox + 7A30HsP1D3rdxU6eb7KBYb7kShZge1batHRogRe5uX6hGO8iHBV/GdDE6jszoGPU + ejhfwblr6AeR6ImrWmrJ4rAx/jNqcHPuktnMRlLsBzdhqRwelwgnN13O5ZYiEJg4 + X3YYp678kHnc58aOkhG2nM32cIGha4tkoGM/GpDnFAd0P0gyJVwKo2A2Wc4cMlzQ + 7dokXbkkzK6lrHJnJjiOfzjD5yMB1Q1zQXKGHB2hJSWAMTjJ9f6qQd3ZaarYPTLx + vc4WTu+547Sx81Vlnes2xTSgt6pyFSBppHpS7KkOxb+wRF2oIpgLA3mQmsq2c60+ + G8/ro91YAYN+cl+v7m1DyEpD9TW/AgMBAAEwDQYJKoZIhvcNAQELBQADggIBACO7 + 2hU2BSGU66FwpIOihgcaADH0SwokjrEJVXlnMv26JzG/Ja63gTNE5OyghufsJtUi + E7E1gOH+dH6lVOIEmQdgGZazGxye20diLlicBATa5W2IuaBzb8Bq7ap75jOB7/sH + Yh+ZV9w0CWgV7KgzJQsp6KPfpMUXn9aJkRkLlCToCj60tC1agw5wzQcokDhOMJaY + 49FFVoKtVYwN6DfXL5Qi4GUmg7NwMUQAOGD6BQ8VLdbSJoWSHvgR2z5SDIubpdyy + XDe2V6lusdka8jdRsFH+TUKyGubs3c5YVq80A8itavxPXBUM/OJCHhUA1VpL3rvz + VgANVV7XFn5fN5TdTOrgJa2LBjflYBC3KiLf1jiW68ZT2rLDrC0yVdHFY0UJG/du + kWWQpZTfdpGbZOl1rQcYQ3BREWkr5kAv8Sh3sPliibVRvyFzwAqpEUDbpCz/Z3kZ + mRPU1Ukz8gjr5FBwzNn4x/l+80kgM22qXLMgxf7cqSLxH+dylmIieLGU0s1k7BqK + 
Dw77DP1QZe4G6WwrdGooxSYSBn4joKV4TI9sbyd34HJQnkMch0ugz9dlpZyT1P8Y + 3xU8Qj1BIF8yoyRuzbOokd9cEjNC6N+Z4g5lLEKYM/j1f0r3tGEoZAu2p39UGLa8 + aszMnFjeymK5OCkMUhg/KNr4WK58pc/3uFMhy8bn + -----END CERTIFICATE----- + key.pem: | + -----BEGIN PRIVATE KEY----- + MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQDB0/7X7JLl4s2q + 9eQw60pGk9XLgnUIRDu2+oWsX73zCT2PbMWsUee9G8OU1kMwNMXQHlM0gm0bL8cO + m3xMiw+DWo3V7XCqHGFFHC3Jc44MqIEwz6pcwHw1WQ9uckjk2dedKUGgbiIuk4J6 + h1nSRIfcOf/+VWZisjMePSWTFuwA2/amMZClTyGp92jRFzjuEsco7EnpFBXn5GTp + Xf0VWfJzvIZw0O6+TV7CCFlYd+QPeAgPMukKSi9qohBVtX6H0XK/xXv2P+aIQUNV + KzTHF/O85vZF19cusG6d8J+X9T1GtbAlD5cQoZkZjz5q+MiKqtGvAtXe9YziG4ZK + MewN9B7D9Q963cVOnm+ygWG+5EoWYHtW2rR0aIEXubl+oRjvIhwVfxnQxOo7M6Bj + 1Ho4X8G5a+gHkeiJq1pqyeKwMf4zanBz7pLZzEZS7Ac3YakcHpcIJzddzuWWIhCY + OF92GKeu/JB53OfGjpIRtpzN9nCBoWuLZKBjPxqQ5xQHdD9IMiVcCqNgNlnOHDJc + 0O3aJF25JMyupaxyZyY4jn84w+cjAdUNc0FyhhwdoSUlgDE4yfX+qkHd2Wmq2D0y + 8b3OFk7vueO0sfNVZZ3rNsU0oLeqchUgaaR6UuypDsW/sERdqCKYCwN5kJrKtnOt + PhvP66PdWAGDfnJfr+5tQ8hKQ/U1vwIDAQABAoICAQCL2aAIv4MGJ2zpq10oBryi + y8v4eHpkqobDcWK9ip8NGl+2em7t9HLWOZAWdboosAsCLL8wJeL/OKvRWFKJD9Tz + m4S3FAi0VKHCMaC/t4aIj5QXWd676Y41F7tQn1kE9kDh/oCBdrVnEbuVGM+wLQ4x + 0g9ovMmQ8K59ZPUVefZycEM4io6pF71cW0zfgHftHtNgLYzuhTWBCYPd9ZjDrRCI + fUArajS4Ti7OpSOB948vshVukfcfG4O21pQeo0NWT8MRpzXX6Sc2rJAehXwhIqEU + bTjIEAIMh/RoNNOR2rqJqFIdi3Ad6dsDXB1XJYXct39vXQZfRqCOC/oK0pZVQwxm + aMbb6VzMjE/paHcBLKorvSIEpuAkgesUkqJeMPxhVnVG6Tg5Xl0WM0pCh/mfir6i + gFGz/xXb0h8pj9Ksk6QpTOTqDf9JAHCuhp9hnuUR+wpnfKyOfOoDXfAyKjHR0bXz + XF9DhycErHDY4CWlhFiu8+qzrtR/sZ/AIp2MfjOzBZYoq7Zj2Z3yXDsvr5fpXUW8 + EU+ClnE/dgRBj5z1sKEQd471+R7PU3Q5apw3YlQZClsaaciTIeWOMOwBjxm9PbZL + CX9BzYaobVAy19ib+/7JgqNxsZ/3gL2xBQU1JoKeY2GnAyyyr8arLZaFR/CUGYyV + SWOdWwLxgThXIJofA3c5QQKCAQEA701sUqcRN8W/m6nCcav8I2EMU/4h18J3bk88 + NbK8vCsDvvFl/2EcjU/6iKcuxhNg1CjHD96H42FeXVXApkdnf2pF24nJHW8M18yH + uwPNzIDnLn9LSN6nJsgyo5LuVCXhf2C4UImv9P3Ae1meI/ApBJsad/bAY8MMHwtS + G/ph/yzhbAb2xF4oJwgOXBm0G2c9sfA0OlHSvYM/kvsQE6770BQ5S1ltrfIv++4J + 
qydiJ0Hq0RFM4aHCCi02cWp+43ALhh3EAPHN3ANpmV1IQKqyAeRFX1sqQuqpryQs + wHQxdF9FLCXHwaF8JOwheu9MTclUZdrkIRf2xac2qdFIszxCkQKCAQEAz1pHtm+f + HYJdOT3XKKgri4/OPZ7nzb1NcT34NbOPKmhRi38ZxutKrFTy+DCtpjUwF4YlE7oa + r13upHaxgxHMa9cpLN+69hmT2Pg2oMBvfEIkAiDKt4dcFQBDDKw98lpXNIStsCDp + nRcToI2TO1AMJNWCulAONov9vGggjS7mxt76cQ2QZH4k6W4yYDcC6g311vR+obA9 + MwJxZfuESw1CLzvE8Ua0esQnXQzpwECC05Q6oObeJ/44huQF7R2MP5iEmDLkgYjj + G5cmHAdD3u0Ceol3zFqF0YDxcfuglMvpmdBpjNj2rl093ufziy84iVTXJ50CRceS + e17et+3kKNF7TwKCAQBJpEHZjaA20ZwNg0hbQtns6Uip8GLpyuaGA8w7mi2KmpIk + iJUi6fenZR1sQEacngoGQCZCM/ENgEFR57nJcd/fzgyBav2BGVOSdVavrpP+gwyh + unqoihxWSvWKcQT20FF8qX8PCdAkTJKXYxTPanC1AiY7FKxQBw4L36f9BCh0JpOY + cuwtsewZVtlUbnSGmlbaE1l/OP7pYyKFUM25wPetKQwYrAScqxMpLC+9g/koq5hf + jjtilCzqhM9kR6mUxD5Hn5FZ2Q/IzSQKFjLN87mj62ON3Lg8r4pYY4GCGD+/2DGp + TFcUt2VE14XWFx4cMgDO93WM2ZsPaE3iJI2C2uCBAoIBADGmr5da4SICzmnfif7d + ThgMJlmRDHayhrHAIghR581Cz4v0smp0exwK92dA2MP85ngrkgNIRA2ME5HkLhtx + jp6gFeb959n4Q/Pnc8VIbym0+MRdr80Ep6MLvgJx2B+JTGpx/tk2+Fm6ZePDIudI + ArBrQ/NzKgQbv3V3BZxpB6/FQvkBQ3sczZ/r2Do70gHTt/Nx9kSnW/Az/I1sDcCe + +yMuT7lqsdrXz4kzh2GW0Pzy+JsAzV+MO2LphRXDRosP7Wg4f4kZCzDXH7QEdVcT + L83BzyLq5jJFiws9MrWOonBHfI7SgTc9coxGxIWmmAYif6anrRyibkwGapRmbYTs + rHcCggEATsKrZHJkZIfxVdw1uELZxDssxtSd3KS09xN2aypGPdSvWg2Di3NbQsNt + 4xSljnjWsYLZpxKYv3dUOOJIiIFGxVCPNCF1vL3ofqrBelXF3AAICa+ktzPQqBDI + eGPj1/h/HodY2pVHVyhZmFFsFy8We/wD64QRx4xI0w9xFAt0qmKVMoCsJmdrXGcO + kYqZnhkq3OyCswrk78OvFcB2Wnk7SWH2tYhBhOqFv8uPojaiRLOb/6xZaZOA9TPi + 0mpJScl+pVxs1UGShVH74lIvhPaPq0AHgK1y1yYphKc1A07l2z0+S1tSYOvdQY8k + NuJLvtwCMGDCxhdYm7OrJ0aUfZzP6w== + -----END PRIVATE KEY----- +kind: Secret +metadata: + name: my-httpbin-cred + labels: + app: httpbin +type: Opaque diff --git a/manifests/quick-start/base/kustomization.yaml b/manifests/quick-start/base/kustomization.yaml index f8d0da97f921..325f94d4b12e 100644 --- a/manifests/quick-start/base/kustomization.yaml +++ b/manifests/quick-start/base/kustomization.yaml @@ -4,6 +4,7 @@ kind: Kustomization resources: - 
../../namespace-install - minio + - httpbin - webhooks - argo-server-sso-secret.yaml - executor/emissary/executor-role.yaml @@ -14,9 +15,11 @@ resources: - workflow-manager-default-rolebinding.yaml - agent-role.yaml - agent-default-rolebinding.yaml + - artifactgc-role.yaml + - artifactgc-default-rolebinding.yaml - cluster-workflow-template-rbac.yaml - artifact-repositories-configmap.yaml patchesStrategicMerge: - overlays/workflow-controller-configmap.yaml - - overlays/argo-server-deployment.yaml + - overlays/argo-server-deployment.yaml \ No newline at end of file diff --git a/manifests/quick-start/base/minio/minio-deploy.yaml b/manifests/quick-start/base/minio/minio-deploy.yaml index 00d15782260d..849522b4550c 100644 --- a/manifests/quick-start/base/minio/minio-deploy.yaml +++ b/manifests/quick-start/base/minio/minio-deploy.yaml @@ -23,7 +23,10 @@ spec: value: password ports: - containerPort: 9000 - command: [minio, server, /data] + name: api + - containerPort: 9001 + name: dashboard + command: [minio, server, --console-address, ":9001", /data] lifecycle: postStart: exec: diff --git a/manifests/quick-start/base/minio/minio-service.yaml b/manifests/quick-start/base/minio/minio-service.yaml index dbdbdf3a04ee..96d3bc5c2d85 100644 --- a/manifests/quick-start/base/minio/minio-service.yaml +++ b/manifests/quick-start/base/minio/minio-service.yaml @@ -8,6 +8,11 @@ spec: selector: app: minio ports: - - protocol: TCP - port: 9000 + - port: 9000 + name: api + protocol: TCP targetPort: 9000 + - port: 9001 + name: dashboard + protocol: TCP + targetPort: 9001 diff --git a/manifests/quick-start/base/overlays/workflow-controller-configmap.yaml b/manifests/quick-start/base/overlays/workflow-controller-configmap.yaml index 51c7faefdb8b..c62fb16c3b08 100644 --- a/manifests/quick-start/base/overlays/workflow-controller-configmap.yaml +++ b/manifests/quick-start/base/overlays/workflow-controller-configmap.yaml @@ -5,24 +5,9 @@ data: requests: cpu: 10m memory: 64Mi - 
containerRuntimeExecutors: | - - name: emissary - selector: - matchLabels: - workflows.argoproj.io/container-runtime-executor: emissary - - name: pns - selector: - matchLabels: - workflows.argoproj.io/container-runtime-executor: pns images: | - argoproj/argosay:v1: - command: [cowsay] - argoproj/argosay:v2: - command: [/argosay] docker/whalesay:latest: - command: [cowsay] - python:alpine3.6: - command: [python3] + cmd: [cowsay] artifactRepository: | s3: bucket: my-bucket diff --git a/mkdocs.yml b/mkdocs.yml index f08ef52a5827..b3851ff1323c 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -3,19 +3,25 @@ repo_url: https://github.com/argoproj/argo-workflows strict: true theme: name: material + custom_dir: docs/overrides font: - text: Work Sans + text: Roboto + code: Roboto Mono logo: assets/logo.png palette: - - scheme: default - primary: blue - toggle: - icon: material/toggle-switch-off-outline - name: Switch to dark mode - - scheme: slate - toggle: - icon: material/toggle-switch - name: Switch to light mode + - scheme: default + primary: indigo + toggle: + icon: material/toggle-switch-off-outline + name: Switch to dark mode + - scheme: slate + toggle: + icon: material/toggle-switch + name: Switch to light mode + features: + - navigation.tabs + - navigation.tabs.sticky + - navigation.top extra: analytics: provider: google @@ -28,76 +34,114 @@ markdown_extensions: - toc: permalink: true nav: - - Overview: README.md - - core-concepts.md - - quick-start.md - - training.md + - Home: README.md + - Getting Started: + - quick-start.md + - training.md + - Walk Through: + - walk-through/index.md + - walk-through/argo-cli.md + - walk-through/hello-world.md + - walk-through/parameters.md + - walk-through/steps.md + - walk-through/dag.md + - walk-through/artifacts.md + - walk-through/the-structure-of-workflow-specs.md + - walk-through/secrets.md + - walk-through/scripts-and-results.md + - walk-through/output-parameters.md + - walk-through/loops.md + - 
walk-through/conditionals.md + - walk-through/retrying-failed-or-errored-steps.md + - walk-through/recursion.md + - walk-through/exit-handlers.md + - walk-through/timeouts.md + - walk-through/volumes.md + - walk-through/suspending.md + - walk-through/daemon-containers.md + - walk-through/sidecars.md + - walk-through/hardwired-artifacts.md + - walk-through/kubernetes-resources.md + - walk-through/docker-in-docker-using-sidecars.md + - walk-through/custom-template-variable-reference.md + - walk-through/continuous-integration-examples.md - User Guide: - # topics are ones that can be done using the UI only - - Beginner: - - workflow-concepts.md - - cli.md - - variables.md - # topics that don't require kubectl or re-configuration - - Intermediate: - - service-accounts.md - - workflow-rbac.md - - node-field-selector.md - - empty-dir.md + - workflow-concepts.md + - Custom Resource Kinds: - workflow-templates.md - - workflow-inputs.md - cluster-workflow-templates.md - cron-workflows.md - - cron-backfill.md - - templates.md + - Template Types: - http-template.md - container-set-template.md - - template-defaults.md - - work-avoidance.md - - enhanced-depends-logic.md - data-sourcing-and-transformation.md - - artifact-repository-ref.md + - inline-templates.md + - Artifacts: + - workflow-inputs.md - key-only-artifacts.md + - artifact-repository-ref.md - conditional-artifacts-parameters.md + - Access Control: + - service-accounts.md + - workflow-rbac.md + - Features: + # this is a bit of a dumping ground, I've tried to order with key features first + - variables.md + - retries.md + - lifecyclehook.md + - synchronization.md + - memoization.md + - template-defaults.md + - enhanced-depends-logic.md + - node-field-selector.md + - Status: - resource-duration.md - estimated-duration.md - - workflow-pod-security-context.md - progress.md - - lifecyclehook.md - workflow-creator.md - - synchronization.md + - Patterns: + - empty-dir.md + - cron-backfill.md - workflow-of-workflows.md - - 
memoization.md - - tolerating-pod-deletion.md - - widgets.md - - retries.md - # all other topics, including API access - - Advanced: - - workflow-restrictions.md - workflow-notifications.md + - work-avoidance.md + - UI Features: + - artifact-visualization.md + - widgets.md + - intermediate-inputs.md + - Debugging Tools: - workflow-events.md - - kubectl.md - - access-token.md - - swagger.md + - debug-pause.md + - API: - rest-api.md + - access-token.md - rest-examples.md - - client-libraries.md - events.md - webhooks.md - - submit-workflow-via-automation.md - workflow-submitting-workflow.md - - resuming-workflow-via-automation.md - async-pattern.md - - running-at-massive-scale.md - - debug-pause.md + - client-libraries.md + - swagger.md - Plugins: - plugins.md - executor_plugins.md - executor_swagger.md - plugin-directory.md + - Best Practices: + - workflow-pod-security-context.md + - tolerating-pod-deletion.md + - running-at-massive-scale.md + - Use Cases: + - use-cases/ci-cd.md + - use-cases/data-processing.md + - use-cases/infrastructure-automation.md + - use-cases/machine-learning.md + - use-cases/other.md + - use-cases/stream-processing.md + - use-cases/webhdfs.md + - FAQ: faq.md + - kubectl.md - ide-setup.md - - security.md - - Examples: https://github.com/argoproj/argo-workflows/blob/master/examples/README.md - Field Reference: fields.md - CLI Reference: - argo: cli/argo.md @@ -107,6 +151,8 @@ nav: - argo archive list: cli/argo_archive_list.md - argo archive list-label-keys: cli/argo_archive_list-label-keys.md - argo archive list-label-values: cli/argo_archive_list-label-values.md + - argo archive resubmit: cli/argo_archive_resubmit.md + - argo archive retry: cli/argo_archive_retry.md - argo auth: cli/argo_auth.md - argo auth token: cli/argo_auth_token.md - argo cluster-template: cli/argo_cluster-template.md @@ -116,6 +162,7 @@ nav: - argo cluster-template lint: cli/argo_cluster-template_lint.md - argo cluster-template list: 
cli/argo_cluster-template_list.md - argo completion: cli/argo_completion.md + - argo cp: cli/argo_cp.md - argo cron: cli/argo_cron.md - argo cron create: cli/argo_cron_create.md - argo cron delete: cli/argo_cron_delete.md @@ -151,45 +198,48 @@ nav: - argo watch: cli/argo_watch.md - Operator Manual: - installation.md - - upgrading.md - releases.md + - upgrading.md + - security.md - Configuration: - managed-namespace.md + - workflow-controller-configmap.md - configure-artifact-repository.md - configure-archive-logs.md - - workflow-controller-configmap.md - - workflow-executors.md - - sidecar-injection.md + - links.md + - environment-variables.md - default-workflow-specs.md - offloading-large-workflows.md - workflow-archive.md - metrics.md - - links.md + - workflow-executors.md + - workflow-restrictions.md + - sidecar-injection.md - Argo Server: - argo-server.md - argo-server-auth-mode.md - tls.md - argo-server-sso.md - argo-server-sso-argocd.md - - high-availability.md - - disaster-recovery.md - - scaling.md - - cost-optimisation.md + - Best Practices: + - high-availability.md + - disaster-recovery.md + - scaling.md + - cost-optimisation.md - windows.md - - environment-variables.md - Developer Guide: - CONTRIBUTING.md - - mentoring.md - architecture.md - running-locally.md - - versioning.md + - doc-changes.md + - mentoring.md - public-api.md - static-code-analysis.md - stress-testing.md - releasing.md - - FAQ: faq.md - - Releases ⧉: https://github.com/argoproj/argo-workflows/releases + - survey-data-privacy.md - Roadmap: roadmap.md - - Blog ⧉: https://blog.argoproj.io/ - - Slack ⧉: https://argoproj.github.io/community/join-slack - - Twitter ⧉: https://twitter.com/argoproj + - Blog: https://blog.argoproj.io/ + - Slack: https://argoproj.github.io/community/join-slack + - Twitter: https://twitter.com/argoproj + - LinkedIn: https://www.linkedin.com/company/argoproj/ diff --git a/persist/sqldb/migrate.go b/persist/sqldb/migrate.go index ebe92d5a20e9..6d91cf650e95 
100644 --- a/persist/sqldb/migrate.go +++ b/persist/sqldb/migrate.go @@ -255,6 +255,9 @@ func (m migrate) Exec(ctx context.Context) (err error) { ansiSQLChange(`create index argo_archived_workflows_i2 on argo_archived_workflows (clustername,instanceid,finishedat)`), // add argo_archived_workflows name index for prefix searching performance ansiSQLChange(`create index argo_archived_workflows_i3 on argo_archived_workflows (clustername,instanceid,name)`), + // add indexes for list archived workflow performance. #8836 + ansiSQLChange(`create index argo_archived_workflows_i4 on argo_archived_workflows (startedat)`), + ansiSQLChange(`create index argo_archived_workflows_labels_i1 on argo_archived_workflows_labels (name,value)`), } { err := m.applyChange(ctx, changeSchemaVersion, change) if err != nil { diff --git a/persist/sqldb/mocks/WorkflowArchive.go b/persist/sqldb/mocks/WorkflowArchive.go index 2d77c8335861..687c8b8810c2 100644 --- a/persist/sqldb/mocks/WorkflowArchive.go +++ b/persist/sqldb/mocks/WorkflowArchive.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.9.4. DO NOT EDIT. +// Code generated by mockery v2.10.0. DO NOT EDIT. 
package mocks @@ -30,6 +30,27 @@ func (_m *WorkflowArchive) ArchiveWorkflow(wf *v1alpha1.Workflow) error { return r0 } +// CountWorkflows provides a mock function with given fields: namespace, name, namePrefix, minStartAt, maxStartAt, labelRequirements +func (_m *WorkflowArchive) CountWorkflows(namespace string, name string, namePrefix string, minStartAt time.Time, maxStartAt time.Time, labelRequirements labels.Requirements) (int64, error) { + ret := _m.Called(namespace, name, namePrefix, minStartAt, maxStartAt, labelRequirements) + + var r0 int64 + if rf, ok := ret.Get(0).(func(string, string, string, time.Time, time.Time, labels.Requirements) int64); ok { + r0 = rf(namespace, name, namePrefix, minStartAt, maxStartAt, labelRequirements) + } else { + r0 = ret.Get(0).(int64) + } + + var r1 error + if rf, ok := ret.Get(1).(func(string, string, string, time.Time, time.Time, labels.Requirements) error); ok { + r1 = rf(namespace, name, namePrefix, minStartAt, maxStartAt, labelRequirements) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // DeleteExpiredWorkflows provides a mock function with given fields: ttl func (_m *WorkflowArchive) DeleteExpiredWorkflows(ttl time.Duration) error { ret := _m.Called(ttl) diff --git a/persist/sqldb/null_workflow_archive.go b/persist/sqldb/null_workflow_archive.go index 74218279ee64..3be1c0426afb 100644 --- a/persist/sqldb/null_workflow_archive.go +++ b/persist/sqldb/null_workflow_archive.go @@ -25,6 +25,10 @@ func (r *nullWorkflowArchive) ListWorkflows(string, string, string, time.Time, t return wfv1.Workflows{}, nil } +func (r *nullWorkflowArchive) CountWorkflows(string, string, string, time.Time, time.Time, labels.Requirements) (int64, error) { + return 0, nil +} + func (r *nullWorkflowArchive) GetWorkflow(string) (*wfv1.Workflow, error) { return nil, fmt.Errorf("getting archived workflows not supported") } diff --git a/persist/sqldb/offload_node_status_repo.go b/persist/sqldb/offload_node_status_repo.go index 
667da1aa082d..f40456425f7b 100644 --- a/persist/sqldb/offload_node_status_repo.go +++ b/persist/sqldb/offload_node_status_repo.go @@ -35,7 +35,7 @@ func NewOffloadNodeStatusRepo(session sqlbuilder.Database, clusterName, tableNam // this environment variable allows you to make Argo Workflows delete offloaded data more or less aggressively, // useful for testing ttl := env.LookupEnvDurationOr("OFFLOAD_NODE_STATUS_TTL", 5*time.Minute) - log.WithField("ttl", ttl).Info("Node status offloading config") + log.WithField("ttl", ttl).Debug("Node status offloading config") return &nodeOffloadRepo{session: session, clusterName: clusterName, tableName: tableName, ttl: ttl}, nil } diff --git a/persist/sqldb/sqldb.go b/persist/sqldb/sqldb.go index bc68b8c89bdf..b51f4e0703e4 100644 --- a/persist/sqldb/sqldb.go +++ b/persist/sqldb/sqldb.go @@ -5,7 +5,6 @@ import ( "fmt" "time" - log "github.com/sirupsen/logrus" "k8s.io/client-go/kubernetes" "upper.io/db.v3/lib/sqlbuilder" "upper.io/db.v3/mysql" @@ -22,8 +21,6 @@ func CreateDBSession(kubectlConfig kubernetes.Interface, namespace string, persi return nil, "", errors.InternalError("Persistence config is not found") } - log.Info("Creating DB session") - if persistConfig.PostgreSQL != nil { return CreatePostGresDBSession(kubectlConfig, namespace, persistConfig.PostgreSQL, persistConfig.ConnectionPool) } else if persistConfig.MySQL != nil { diff --git a/persist/sqldb/workflow_archive.go b/persist/sqldb/workflow_archive.go index bbbdea4e8fa8..ac7ad1d7436f 100644 --- a/persist/sqldb/workflow_archive.go +++ b/persist/sqldb/workflow_archive.go @@ -46,12 +46,17 @@ type archivedWorkflowLabelRecord struct { Value string `db:"value"` } +type archivedWorkflowCount struct { + Total uint64 `db:"total,omitempty" json:"total"` +} + //go:generate mockery --name=WorkflowArchive type WorkflowArchive interface { ArchiveWorkflow(wf *wfv1.Workflow) error // list workflows, with the most recently started workflows at the beginning (i.e. 
index 0 is the most recent) ListWorkflows(namespace string, name string, namePrefix string, minStartAt, maxStartAt time.Time, labelRequirements labels.Requirements, limit, offset int) (wfv1.Workflows, error) + CountWorkflows(namespace string, name string, namePrefix string, minStartAt, maxStartAt time.Time, labelRequirements labels.Requirements) (int64, error) GetWorkflow(uid string) (*wfv1.Workflow, error) DeleteWorkflow(uid string) error DeleteExpiredWorkflows(ttl time.Duration) error @@ -185,6 +190,30 @@ func (r *workflowArchive) ListWorkflows(namespace string, name string, namePrefi return wfs, nil } +func (r *workflowArchive) CountWorkflows(namespace string, name string, namePrefix string, minStartedAt, maxStartedAt time.Time, labelRequirements labels.Requirements) (int64, error) { + total := &archivedWorkflowCount{} + clause, err := labelsClause(r.dbType, labelRequirements) + if err != nil { + return 0, err + } + + err = r.session. + Select(db.Raw("count(*) as total")). + From(archiveTableName). + Where(r.clusterManagedNamespaceAndInstanceID()). + And(namespaceEqual(namespace)). + And(nameEqual(name)). + And(namePrefixClause(namePrefix)). + And(startedAtClause(minStartedAt, maxStartedAt)). + And(clause). 
+ One(total) + if err != nil { + return 0, err + } + + return int64(total.Total), nil +} + func (r *workflowArchive) clusterManagedNamespaceAndInstanceID() db.Compound { return db.And( db.Cond{"clustername": r.clusterName}, diff --git a/pkg/apiclient/_.primary.swagger.json b/pkg/apiclient/_.primary.swagger.json index f74b4e154cfe..d56f71c02c70 100644 --- a/pkg/apiclient/_.primary.swagger.json +++ b/pkg/apiclient/_.primary.swagger.json @@ -20,16 +20,86 @@ "BearerToken": { "description": "Bearer Token authentication", "type": "apiKey", - "name": "authorization", + "name": "Authorization", "in": "header" - }, - "HTTPBasic": { - "description": "HTTP Basic authentication", - "type": "basic" } }, + "security": [ + { + "BearerToken": [] + } + ], "paths": { - "/artifacts/{namespace}/{name}/{podName}/{artifactName}": { + "/artifact-files/{namespace}/{idDiscriminator}/{id}/{nodeId}/{artifactDiscriminator}/{artifactName}": { + "get": { + "tags": [ + "ArtifactService" + ], + "summary": "Get an artifact.", + "operationId": "ArtifactService_GetArtifactFile", + "parameters": [ + { + "type": "string", + "name": "namespace", + "in": "path", + "required": true + }, + { + "type": "string", + "name": "idDiscriminator", + "in": "path", + "required": true, + "enum": [ + "workflow", + "archived-workflows " + ] + }, + { + "type": "string", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "string", + "name": "nodeId", + "in": "path", + "required": true + }, + { + "type": "string", + "name": "artifactName", + "in": "path", + "required": true + }, + { + "type": "string", + "name": "artifactDiscriminator", + "in": "path", + "required": true, + "enum": [ + "outputs" + ] + } + ], + "responses": { + "200": { + "description": "An artifact file.", + "schema": { + "type": "string", + "format": "binary" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/grpc.gateway.runtime.Error" + } + } + } + } + }, + 
"/artifacts/{namespace}/{name}/{nodeId}/{artifactName}": { "get": { "tags": [ "ArtifactService" @@ -51,7 +121,7 @@ }, { "type": "string", - "name": "podName", + "name": "nodeId", "in": "path", "required": true }, @@ -65,12 +135,9 @@ "responses": { "200": { "description": "An artifact file.", - "content": { - "text/plain": { - "schema": { - "type": "string" - } - } + "schema": { + "type": "string", + "format": "binary" } }, "default": { @@ -82,7 +149,7 @@ } } }, - "/input-artifacts/{namespace}/{name}/{podName}/{artifactName}": { + "/input-artifacts/{namespace}/{name}/{nodeId}/{artifactName}": { "get": { "tags": [ "ArtifactService" @@ -104,7 +171,7 @@ }, { "type": "string", - "name": "podName", + "name": "nodeId", "in": "path", "required": true }, @@ -118,12 +185,9 @@ "responses": { "200": { "description": "An artifact file.", - "content": { - "text/plain": { - "schema": { - "type": "string" - } - } + "schema": { + "type": "string", + "format": "binary" } }, "default": { @@ -135,7 +199,7 @@ } } }, - "/artifacts-by-uid/{uid}/{podName}/{artifactName}": { + "/artifacts-by-uid/{uid}/{nodeId}/{artifactName}": { "get": { "tags": [ "ArtifactService" @@ -151,7 +215,7 @@ }, { "type": "string", - "name": "podName", + "name": "nodeId", "in": "path", "required": true }, @@ -165,12 +229,9 @@ "responses": { "200": { "description": "An artifact file.", - "content": { - "text/plain": { - "schema": { - "type": "string" - } - } + "schema": { + "type": "string", + "format": "binary" } }, "default": { @@ -182,7 +243,7 @@ } } }, - "/input-artifacts-by-uid/{uid}/{podName}/{artifactName}": { + "/input-artifacts-by-uid/{uid}/{nodeId}/{artifactName}": { "get": { "tags": [ "ArtifactService" @@ -190,12 +251,6 @@ "summary": "Get an input artifact by UID.", "operationId": "ArtifactService_GetInputArtifactByUID", "parameters": [ - { - "type": "string", - "name": "namespace", - "in": "path", - "required": true - }, { "type": "string", "name": "uid", @@ -204,7 +259,7 @@ }, { "type": "string", - 
"name": "podName", + "name": "nodeId", "in": "path", "required": true }, @@ -218,12 +273,9 @@ "responses": { "200": { "description": "An artifact file.", - "content": { - "text/plain": { - "schema": { - "type": "string" - } - } + "schema": { + "type": "string", + "format": "binary" } }, "default": { @@ -245,6 +297,38 @@ "items": { "$ref": "#/definitions/github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowStep" } + }, + "google.protobuf.Any": { + "type": "object", + "properties": { + "type_url": { + "type": "string" + }, + "value": { + "type": "string", + "format": "byte" + } + } + }, + "grpc.gateway.runtime.Error": { + "type": "object", + "properties": { + "code": { + "type": "integer" + }, + "details": { + "type": "array", + "items": { + "$ref": "#/definitions/google.protobuf.Any" + } + }, + "error": { + "type": "string" + }, + "message": { + "type": "string" + } + } } } } diff --git a/pkg/apiclient/apiclient.go b/pkg/apiclient/apiclient.go index 5a70375b6b79..40fa69bd62be 100644 --- a/pkg/apiclient/apiclient.go +++ b/pkg/apiclient/apiclient.go @@ -69,8 +69,14 @@ func NewClientFromOpts(opts Opts) (context.Context, Client, error) { return nil, nil, fmt.Errorf("cannot use instance ID with Argo Server") } if opts.ArgoServerOpts.HTTP1 { + if opts.AuthSupplier == nil { + return nil, nil, fmt.Errorf("AuthSupplier cannot be empty when connecting to Argo Server") + } return newHTTP1Client(opts.ArgoServerOpts.GetURL(), opts.AuthSupplier(), opts.ArgoServerOpts.InsecureSkipVerify, opts.ArgoServerOpts.Headers) } else if opts.ArgoServerOpts.URL != "" { + if opts.AuthSupplier == nil { + return nil, nil, fmt.Errorf("AuthSupplier cannot be empty when connecting to Argo Server") + } return newArgoServerClient(opts.ArgoServerOpts, opts.AuthSupplier()) } else { if opts.ClientConfigSupplier != nil { diff --git a/pkg/apiclient/clusterworkflowtemplate/cluster-workflow-template.proto b/pkg/apiclient/clusterworkflowtemplate/cluster-workflow-template.proto index 
ae51bddef8cd..ccedd876b7fd 100644 --- a/pkg/apiclient/clusterworkflowtemplate/cluster-workflow-template.proto +++ b/pkg/apiclient/clusterworkflowtemplate/cluster-workflow-template.proto @@ -10,70 +10,68 @@ import "github.com/argoproj/argo-workflows/pkg/apis/workflow/v1alpha1/generated. // Workflow Service API performs CRUD actions against application resources package clusterworkflowtemplate; - message ClusterWorkflowTemplateCreateRequest { - github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplate template = 1; - k8s.io.apimachinery.pkg.apis.meta.v1.CreateOptions createOptions = 2; + github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplate template = 1; + k8s.io.apimachinery.pkg.apis.meta.v1.CreateOptions createOptions = 2; } message ClusterWorkflowTemplateGetRequest { - string name = 1; - k8s.io.apimachinery.pkg.apis.meta.v1.GetOptions getOptions = 2; + string name = 1; + k8s.io.apimachinery.pkg.apis.meta.v1.GetOptions getOptions = 2; } message ClusterWorkflowTemplateListRequest { - k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 1; + k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 1; } message ClusterWorkflowTemplateUpdateRequest { - // DEPRECATED: This field is ignored. - string name = 1 [deprecated=true]; - github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplate template = 2; + // DEPRECATED: This field is ignored. 
+ string name = 1 [ deprecated = true ]; + github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplate template = 2; } message ClusterWorkflowTemplateDeleteRequest { - string name = 1; - k8s.io.apimachinery.pkg.apis.meta.v1.DeleteOptions deleteOptions = 2; + string name = 1; + k8s.io.apimachinery.pkg.apis.meta.v1.DeleteOptions deleteOptions = 2; } message ClusterWorkflowTemplateDeleteResponse { } message ClusterWorkflowTemplateLintRequest { - github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplate template = 1; - k8s.io.apimachinery.pkg.apis.meta.v1.CreateOptions createOptions = 2; + github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplate template = 1; + k8s.io.apimachinery.pkg.apis.meta.v1.CreateOptions createOptions = 2; } service ClusterWorkflowTemplateService { - rpc CreateClusterWorkflowTemplate (ClusterWorkflowTemplateCreateRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplate) { - option (google.api.http) = { - post: "/api/v1/cluster-workflow-templates" - body: "*" - }; - } - - rpc GetClusterWorkflowTemplate (ClusterWorkflowTemplateGetRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplate) { - option (google.api.http).get = "/api/v1/cluster-workflow-templates/{name}"; - } + rpc CreateClusterWorkflowTemplate(ClusterWorkflowTemplateCreateRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplate) { + option (google.api.http) = { + post : "/api/v1/cluster-workflow-templates" + body : "*" + }; + } - rpc ListClusterWorkflowTemplates (ClusterWorkflowTemplateListRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplateList) { - option (google.api.http).get = "/api/v1/cluster-workflow-templates"; - } + rpc GetClusterWorkflowTemplate(ClusterWorkflowTemplateGetRequest) returns 
(github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplate) { + option (google.api.http).get = "/api/v1/cluster-workflow-templates/{name}"; + } - rpc UpdateClusterWorkflowTemplate (ClusterWorkflowTemplateUpdateRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplate) { - option (google.api.http) = { - put: "/api/v1/cluster-workflow-templates/{name}" - body: "*" - }; - } + rpc ListClusterWorkflowTemplates(ClusterWorkflowTemplateListRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplateList) { + option (google.api.http).get = "/api/v1/cluster-workflow-templates"; + } - rpc DeleteClusterWorkflowTemplate (ClusterWorkflowTemplateDeleteRequest) returns (ClusterWorkflowTemplateDeleteResponse) { - option (google.api.http).delete = "/api/v1/cluster-workflow-templates/{name}"; - } + rpc UpdateClusterWorkflowTemplate(ClusterWorkflowTemplateUpdateRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplate) { + option (google.api.http) = { + put : "/api/v1/cluster-workflow-templates/{name}" + body : "*" + }; + } - rpc LintClusterWorkflowTemplate (ClusterWorkflowTemplateLintRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplate) { - option (google.api.http) = { - post: "/api/v1/cluster-workflow-templates/lint" - body: "*" - }; - } + rpc DeleteClusterWorkflowTemplate(ClusterWorkflowTemplateDeleteRequest) returns (ClusterWorkflowTemplateDeleteResponse) { + option (google.api.http).delete = "/api/v1/cluster-workflow-templates/{name}"; + } + rpc LintClusterWorkflowTemplate(ClusterWorkflowTemplateLintRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplate) { + option (google.api.http) = { + post : "/api/v1/cluster-workflow-templates/lint" + body : "*" + }; + } } diff --git 
a/pkg/apiclient/cronworkflow/cron-workflow.proto b/pkg/apiclient/cronworkflow/cron-workflow.proto index c4737ebaa2a6..668acac0e052 100644 --- a/pkg/apiclient/cronworkflow/cron-workflow.proto +++ b/pkg/apiclient/cronworkflow/cron-workflow.proto @@ -8,97 +8,97 @@ import "github.com/argoproj/argo-workflows/pkg/apis/workflow/v1alpha1/generated. package cronworkflow; message LintCronWorkflowRequest { - string namespace = 1; - github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow cronWorkflow = 2; + string namespace = 1; + github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow cronWorkflow = 2; } message CreateCronWorkflowRequest { - string namespace = 1; - github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow cronWorkflow = 2; - k8s.io.apimachinery.pkg.apis.meta.v1.CreateOptions createOptions = 3; + string namespace = 1; + github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow cronWorkflow = 2; + k8s.io.apimachinery.pkg.apis.meta.v1.CreateOptions createOptions = 3; } message ListCronWorkflowsRequest { - string namespace = 1; - k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; + string namespace = 1; + k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; } message GetCronWorkflowRequest { - string name = 1; - string namespace = 2; - k8s.io.apimachinery.pkg.apis.meta.v1.GetOptions getOptions = 3; + string name = 1; + string namespace = 2; + k8s.io.apimachinery.pkg.apis.meta.v1.GetOptions getOptions = 3; } message UpdateCronWorkflowRequest { - // DEPRECATED: This field is ignored. - string name = 1 [deprecated=true]; - string namespace = 2; - github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow cronWorkflow = 3; + // DEPRECATED: This field is ignored. 
+ string name = 1 [ deprecated = true ]; + string namespace = 2; + github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow cronWorkflow = 3; } message DeleteCronWorkflowRequest { - string name = 1; - string namespace = 2; - k8s.io.apimachinery.pkg.apis.meta.v1.DeleteOptions deleteOptions = 3; + string name = 1; + string namespace = 2; + k8s.io.apimachinery.pkg.apis.meta.v1.DeleteOptions deleteOptions = 3; } message CronWorkflowDeletedResponse { } message CronWorkflowSuspendRequest { - string name = 1; - string namespace = 2; + string name = 1; + string namespace = 2; } message CronWorkflowResumeRequest { - string name = 1; - string namespace = 2; + string name = 1; + string namespace = 2; } service CronWorkflowService { - rpc LintCronWorkflow (LintCronWorkflowRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow) { - option (google.api.http) = { - post: "/api/v1/cron-workflows/{namespace}/lint" - body: "*" - }; - } - rpc CreateCronWorkflow (CreateCronWorkflowRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow) { - option (google.api.http) = { - post: "/api/v1/cron-workflows/{namespace}" - body: "*" - }; - } - - rpc ListCronWorkflows (ListCronWorkflowsRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflowList) { - option (google.api.http).get = "/api/v1/cron-workflows/{namespace}"; - } - - rpc GetCronWorkflow (GetCronWorkflowRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow) { - option (google.api.http).get = "/api/v1/cron-workflows/{namespace}/{name}"; - } - - rpc UpdateCronWorkflow (UpdateCronWorkflowRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow) { - option (google.api.http) = { - put: "/api/v1/cron-workflows/{namespace}/{name}" - body: "*" - }; - } - - rpc DeleteCronWorkflow (DeleteCronWorkflowRequest) returns 
(CronWorkflowDeletedResponse) { - option (google.api.http).delete = "/api/v1/cron-workflows/{namespace}/{name}"; - } - - rpc ResumeCronWorkflow (CronWorkflowResumeRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow) { - option (google.api.http) = { - put: "/api/v1/cron-workflows/{namespace}/{name}/resume" - body: "*" - }; - } - - rpc SuspendCronWorkflow (CronWorkflowSuspendRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow) { - option (google.api.http) = { - put: "/api/v1/cron-workflows/{namespace}/{name}/suspend" - body: "*" - }; - } + rpc LintCronWorkflow(LintCronWorkflowRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow) { + option (google.api.http) = { + post : "/api/v1/cron-workflows/{namespace}/lint" + body : "*" + }; + } + rpc CreateCronWorkflow(CreateCronWorkflowRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow) { + option (google.api.http) = { + post : "/api/v1/cron-workflows/{namespace}" + body : "*" + }; + } + + rpc ListCronWorkflows(ListCronWorkflowsRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflowList) { + option (google.api.http).get = "/api/v1/cron-workflows/{namespace}"; + } + + rpc GetCronWorkflow(GetCronWorkflowRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow) { + option (google.api.http).get = "/api/v1/cron-workflows/{namespace}/{name}"; + } + + rpc UpdateCronWorkflow(UpdateCronWorkflowRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow) { + option (google.api.http) = { + put : "/api/v1/cron-workflows/{namespace}/{name}" + body : "*" + }; + } + + rpc DeleteCronWorkflow(DeleteCronWorkflowRequest) returns (CronWorkflowDeletedResponse) { + option (google.api.http).delete = "/api/v1/cron-workflows/{namespace}/{name}"; + } + + rpc 
ResumeCronWorkflow(CronWorkflowResumeRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow) { + option (google.api.http) = { + put : "/api/v1/cron-workflows/{namespace}/{name}/resume" + body : "*" + }; + } + + rpc SuspendCronWorkflow(CronWorkflowSuspendRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.CronWorkflow) { + option (google.api.http) = { + put : "/api/v1/cron-workflows/{namespace}/{name}/suspend" + body : "*" + }; + } } diff --git a/pkg/apiclient/event/event.proto b/pkg/apiclient/event/event.proto index d316786a94ed..3618beae28b2 100644 --- a/pkg/apiclient/event/event.proto +++ b/pkg/apiclient/event/event.proto @@ -8,35 +8,35 @@ import "github.com/argoproj/argo-workflows/pkg/apis/workflow/v1alpha1/generated. package event; message EventRequest { - // The namespace for the event. This can be empty if the client has cluster scoped permissions. - // If empty, then the event is "broadcast" to workflow event binding in all namespaces. - string namespace = 1; - // Optional discriminator for the event. This should almost always be empty. - // Used for edge-cases where the event payload alone is not provide enough information to discriminate the event. - // This MUST NOT be used as security mechanism, e.g. to allow two clients to use the same access token, or - // to support webhooks on unsecured server. Instead, use access tokens. - // This is made available as `discriminator` in the event binding selector (`/spec/event/selector)` - string discriminator = 2; - // The event itself can be any data. - github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Item payload = 3; + // The namespace for the event. This can be empty if the client has cluster scoped permissions. + // If empty, then the event is "broadcast" to workflow event binding in all namespaces. + string namespace = 1; + // Optional discriminator for the event. This should almost always be empty. 
+ // Used for edge-cases where the event payload alone is not provide enough information to discriminate the event. + // This MUST NOT be used as security mechanism, e.g. to allow two clients to use the same access token, or + // to support webhooks on unsecured server. Instead, use access tokens. + // This is made available as `discriminator` in the event binding selector (`/spec/event/selector)` + string discriminator = 2; + // The event itself can be any data. + github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Item payload = 3; } message EventResponse { } message ListWorkflowEventBindingsRequest { - string namespace = 1; - k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; + string namespace = 1; + k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; } service EventService { - rpc ReceiveEvent (EventRequest) returns (EventResponse) { - option (google.api.http) = { - post: "/api/v1/events/{namespace}/{discriminator}" - body: "payload" - }; - } - rpc ListWorkflowEventBindings (ListWorkflowEventBindingsRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowEventBindingList) { - option (google.api.http).get = "/api/v1/workflow-event-bindings/{namespace}"; - } + rpc ReceiveEvent(EventRequest) returns (EventResponse) { + option (google.api.http) = { + post : "/api/v1/events/{namespace}/{discriminator}" + body : "payload" + }; + } + rpc ListWorkflowEventBindings(ListWorkflowEventBindingsRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowEventBindingList) { + option (google.api.http).get = "/api/v1/workflow-event-bindings/{namespace}"; + } } diff --git a/pkg/apiclient/eventsource/eventsource.proto b/pkg/apiclient/eventsource/eventsource.proto index 7538a0524cc7..932c6944cce6 100644 --- a/pkg/apiclient/eventsource/eventsource.proto +++ b/pkg/apiclient/eventsource/eventsource.proto @@ -9,61 +9,61 @@ import 
"github.com/argoproj/argo-events/pkg/apis/eventsource/v1alpha1/generated. package eventsource; message CreateEventSourceRequest { - string namespace = 1; - github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource eventSource = 2; + string namespace = 1; + github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource eventSource = 2; } message GetEventSourceRequest { - string name = 1; - string namespace = 2; + string name = 1; + string namespace = 2; } message ListEventSourcesRequest { - string namespace = 1; - k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; + string namespace = 1; + k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; } message DeleteEventSourceRequest { - string name = 1; - string namespace = 2; - k8s.io.apimachinery.pkg.apis.meta.v1.DeleteOptions deleteOptions = 3; + string name = 1; + string namespace = 2; + k8s.io.apimachinery.pkg.apis.meta.v1.DeleteOptions deleteOptions = 3; } message UpdateEventSourceRequest { - string name = 1; - string namespace = 2; - github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource eventSource = 3; + string name = 1; + string namespace = 2; + github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource eventSource = 3; } message EventSourcesLogsRequest { - string namespace = 1; - // optional - only return entries for this event source - string name = 2; - // optional - only return entries for this event source type (e.g. `webhook`) - string eventSourceType = 3; - // optional - only return entries for this event name (e.g. `example`) - string eventName = 4; - // optional - only return entries where `msg` matches this regular expression - string grep = 5; - k8s.io.api.core.v1.PodLogOptions podLogOptions = 6; + string namespace = 1; + // optional - only return entries for this event source + string name = 2; + // optional - only return entries for this event source type (e.g. 
`webhook`) + string eventSourceType = 3; + // optional - only return entries for this event name (e.g. `example`) + string eventName = 4; + // optional - only return entries where `msg` matches this regular expression + string grep = 5; + k8s.io.api.core.v1.PodLogOptions podLogOptions = 6; } // structured log entry message LogEntry { - string namespace = 1; - string eventSourceName = 2; - // optional - the event source type (e.g. `webhook`) - string eventSourceType = 3; - // optional - the event name (e.g. `example`) - string eventName = 4; - string level = 5; - k8s.io.apimachinery.pkg.apis.meta.v1.Time time = 6; - string msg = 7; + string namespace = 1; + string eventSourceName = 2; + // optional - the event source type (e.g. `webhook`) + string eventSourceType = 3; + // optional - the event name (e.g. `example`) + string eventName = 4; + string level = 5; + k8s.io.apimachinery.pkg.apis.meta.v1.Time time = 6; + string msg = 7; } message EventSourceWatchEvent { - string type = 1; - github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource object = 2; + string type = 1; + github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource object = 2; } message EventSourceDeletedResponse { @@ -71,36 +71,35 @@ message EventSourceDeletedResponse { service EventSourceService { - rpc CreateEventSource (CreateEventSourceRequest) returns (github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource) { - option (google.api.http) = { - post: "/api/v1/event-sources/{namespace}" - body: "*" - }; - } - - rpc GetEventSource (GetEventSourceRequest) returns (github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource) { - option (google.api.http).get = "/api/v1/event-sources/{namespace}/{name}"; - } - - rpc DeleteEventSource (DeleteEventSourceRequest) returns (EventSourceDeletedResponse) { - option (google.api.http).delete = "/api/v1/event-sources/{namespace}/{name}"; - } - - rpc UpdateEventSource (UpdateEventSourceRequest) returns 
(github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource) { - option (google.api.http) = { - put: "/api/v1/event-sources/{namespace}/{name}" - body: "*" - }; - } - - rpc ListEventSources (ListEventSourcesRequest) returns (github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSourceList) { - option (google.api.http).get = "/api/v1/event-sources/{namespace}"; - } - rpc EventSourcesLogs (EventSourcesLogsRequest) returns (stream LogEntry) { - option (google.api.http).get = "/api/v1/stream/event-sources/{namespace}/logs"; - } - rpc WatchEventSources (ListEventSourcesRequest) returns (stream EventSourceWatchEvent) { - option (google.api.http).get = "/api/v1/stream/event-sources/{namespace}"; - } - + rpc CreateEventSource(CreateEventSourceRequest) returns (github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource) { + option (google.api.http) = { + post : "/api/v1/event-sources/{namespace}" + body : "*" + }; + } + + rpc GetEventSource(GetEventSourceRequest) returns (github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource) { + option (google.api.http).get = "/api/v1/event-sources/{namespace}/{name}"; + } + + rpc DeleteEventSource(DeleteEventSourceRequest) returns (EventSourceDeletedResponse) { + option (google.api.http).delete = "/api/v1/event-sources/{namespace}/{name}"; + } + + rpc UpdateEventSource(UpdateEventSourceRequest) returns (github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource) { + option (google.api.http) = { + put : "/api/v1/event-sources/{namespace}/{name}" + body : "*" + }; + } + + rpc ListEventSources(ListEventSourcesRequest) returns (github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSourceList) { + option (google.api.http).get = "/api/v1/event-sources/{namespace}"; + } + rpc EventSourcesLogs(EventSourcesLogsRequest) returns (stream LogEntry) { + option (google.api.http).get = "/api/v1/stream/event-sources/{namespace}/logs"; + } + rpc 
WatchEventSources(ListEventSourcesRequest) returns (stream EventSourceWatchEvent) { + option (google.api.http).get = "/api/v1/stream/event-sources/{namespace}"; + } } diff --git a/pkg/apiclient/http1/facade.go b/pkg/apiclient/http1/facade.go index 9e8147cdbeb9..597b1f315ebd 100644 --- a/pkg/apiclient/http1/facade.go +++ b/pkg/apiclient/http1/facade.go @@ -112,12 +112,14 @@ func (h Facade) do(in interface{}, out interface{}, method string, path string) TLSClientConfig: &tls.Config{ InsecureSkipVerify: h.insecureSkipVerify, }, + DisableKeepAlives: true, }, } resp, err := client.Do(req) if err != nil { return err } + defer resp.Body.Close() err = errFromResponse(resp) if err != nil { return err diff --git a/pkg/apiclient/http1/info-service-client.go b/pkg/apiclient/http1/info-service-client.go index b1e934804e56..caadab06d520 100644 --- a/pkg/apiclient/http1/info-service-client.go +++ b/pkg/apiclient/http1/info-service-client.go @@ -25,3 +25,8 @@ func (h InfoServiceClient) GetUserInfo(_ context.Context, in *infopkg.GetUserInf out := &infopkg.GetUserInfoResponse{} return out, h.Get(in, out, "/api/v1/userinfo") } + +func (h InfoServiceClient) CollectEvent(_ context.Context, in *infopkg.CollectEventRequest, _ ...grpc.CallOption) (*infopkg.CollectEventResponse, error) { + out := &infopkg.CollectEventResponse{} + return out, h.Post(in, out, "/api/v1/tracking/event") +} diff --git a/pkg/apiclient/info/info.pb.go b/pkg/apiclient/info/info.pb.go index 3eef106b3ac9..c0a6d6b50aa3 100644 --- a/pkg/apiclient/info/info.pb.go +++ b/pkg/apiclient/info/info.pb.go @@ -218,15 +218,16 @@ func (m *GetUserInfoRequest) XXX_DiscardUnknown() { var xxx_messageInfo_GetUserInfoRequest proto.InternalMessageInfo type GetUserInfoResponse struct { - Issuer string `protobuf:"bytes,1,opt,name=issuer,proto3" json:"issuer,omitempty"` - Subject string `protobuf:"bytes,2,opt,name=subject,proto3" json:"subject,omitempty"` - Groups []string `protobuf:"bytes,3,rep,name=groups,proto3" 
json:"groups,omitempty"` - Email string `protobuf:"bytes,4,opt,name=email,proto3" json:"email,omitempty"` - EmailVerified bool `protobuf:"varint,5,opt,name=emailVerified,proto3" json:"emailVerified,omitempty"` - ServiceAccountName string `protobuf:"bytes,6,opt,name=serviceAccountName,proto3" json:"serviceAccountName,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Issuer string `protobuf:"bytes,1,opt,name=issuer,proto3" json:"issuer,omitempty"` + Subject string `protobuf:"bytes,2,opt,name=subject,proto3" json:"subject,omitempty"` + Groups []string `protobuf:"bytes,3,rep,name=groups,proto3" json:"groups,omitempty"` + Email string `protobuf:"bytes,4,opt,name=email,proto3" json:"email,omitempty"` + EmailVerified bool `protobuf:"varint,5,opt,name=emailVerified,proto3" json:"emailVerified,omitempty"` + ServiceAccountName string `protobuf:"bytes,6,opt,name=serviceAccountName,proto3" json:"serviceAccountName,omitempty"` + ServiceAccountNamespace string `protobuf:"bytes,7,opt,name=serviceAccountNamespace,proto3" json:"serviceAccountNamespace,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *GetUserInfoResponse) Reset() { *m = GetUserInfoResponse{} } @@ -304,6 +305,99 @@ func (m *GetUserInfoResponse) GetServiceAccountName() string { return "" } +func (m *GetUserInfoResponse) GetServiceAccountNamespace() string { + if m != nil { + return m.ServiceAccountNamespace + } + return "" +} + +type CollectEventRequest struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CollectEventRequest) Reset() { *m = CollectEventRequest{} } +func (m *CollectEventRequest) String() string { return proto.CompactTextString(m) } +func (*CollectEventRequest) ProtoMessage() {} +func 
(*CollectEventRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_96940c93018255fa, []int{5} +} +func (m *CollectEventRequest) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *CollectEventRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_CollectEventRequest.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *CollectEventRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CollectEventRequest.Merge(m, src) +} +func (m *CollectEventRequest) XXX_Size() int { + return m.Size() +} +func (m *CollectEventRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CollectEventRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CollectEventRequest proto.InternalMessageInfo + +func (m *CollectEventRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type CollectEventResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CollectEventResponse) Reset() { *m = CollectEventResponse{} } +func (m *CollectEventResponse) String() string { return proto.CompactTextString(m) } +func (*CollectEventResponse) ProtoMessage() {} +func (*CollectEventResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_96940c93018255fa, []int{6} +} +func (m *CollectEventResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *CollectEventResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_CollectEventResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *CollectEventResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CollectEventResponse.Merge(m, src) +} 
+func (m *CollectEventResponse) XXX_Size() int { + return m.Size() +} +func (m *CollectEventResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CollectEventResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CollectEventResponse proto.InternalMessageInfo + func init() { proto.RegisterType((*GetInfoRequest)(nil), "info.GetInfoRequest") proto.RegisterType((*InfoResponse)(nil), "info.InfoResponse") @@ -311,49 +405,56 @@ func init() { proto.RegisterType((*GetVersionRequest)(nil), "info.GetVersionRequest") proto.RegisterType((*GetUserInfoRequest)(nil), "info.GetUserInfoRequest") proto.RegisterType((*GetUserInfoResponse)(nil), "info.GetUserInfoResponse") + proto.RegisterType((*CollectEventRequest)(nil), "info.CollectEventRequest") + proto.RegisterType((*CollectEventResponse)(nil), "info.CollectEventResponse") } func init() { proto.RegisterFile("pkg/apiclient/info/info.proto", fileDescriptor_96940c93018255fa) } var fileDescriptor_96940c93018255fa = []byte{ - // 579 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x54, 0xc1, 0x6e, 0xd3, 0x40, - 0x10, 0x95, 0xdd, 0x36, 0x6d, 0x37, 0xa5, 0xa4, 0xdb, 0x88, 0x1a, 0x0b, 0xa2, 0x2a, 0xe2, 0x50, - 0x21, 0xb1, 0x56, 0x5a, 0x81, 0x80, 0x1b, 0x54, 0x10, 0x45, 0xa2, 0x1c, 0x8c, 0xe8, 0x01, 0x55, - 0x42, 0x1b, 0x67, 0xe2, 0x6e, 0xed, 0xec, 0x9a, 0xdd, 0xb5, 0xab, 0x5e, 0xb9, 0x71, 0xe6, 0x23, - 0xf8, 0x0f, 0x4e, 0x48, 0x5c, 0x90, 0xf8, 0x01, 0x14, 0xf1, 0x21, 0xc8, 0xeb, 0x75, 0x9a, 0xd0, - 0x1e, 0x90, 0xb8, 0x58, 0x33, 0xb3, 0xb3, 0x6f, 0xde, 0xbc, 0x99, 0x35, 0xba, 0x9b, 0x25, 0x71, - 0x40, 0x33, 0x16, 0xa5, 0x0c, 0xb8, 0x0e, 0x18, 0x1f, 0x0b, 0xf3, 0x21, 0x99, 0x14, 0x5a, 0xe0, - 0xe5, 0xd2, 0xf6, 0xef, 0xc4, 0x42, 0xc4, 0x29, 0x94, 0x79, 0x01, 0xe5, 0x5c, 0x68, 0xaa, 0x99, - 0xe0, 0xaa, 0xca, 0xf1, 0x8f, 0x62, 0xa6, 0x4f, 0xf3, 0x21, 0x89, 0xc4, 0x24, 0xa0, 0x32, 0x16, - 0x99, 0x14, 0x67, 0xc6, 0x78, 0x70, 0x2e, 0x64, 0x32, 0x4e, 0xc5, 0xb9, 0x0a, 0x6c, 0x15, 0x15, - 0xd4, 
0xa1, 0xa0, 0xe8, 0xd1, 0x34, 0x3b, 0xa5, 0xbd, 0x20, 0x06, 0x0e, 0x92, 0x6a, 0x18, 0x55, - 0x70, 0xdd, 0x16, 0xda, 0xec, 0x83, 0x1e, 0xf0, 0xb1, 0x08, 0xe1, 0x43, 0x0e, 0x4a, 0x77, 0xbf, - 0xb8, 0x68, 0xa3, 0xf2, 0x55, 0x26, 0xb8, 0x02, 0x7c, 0x1f, 0xb5, 0x26, 0x94, 0xd3, 0x18, 0x46, - 0xaf, 0xe9, 0x04, 0x54, 0x46, 0x23, 0xf0, 0x9c, 0x5d, 0x67, 0x6f, 0x3d, 0xbc, 0x12, 0xc7, 0x27, - 0x68, 0x25, 0x65, 0x3c, 0x51, 0x9e, 0xbb, 0xbb, 0xb4, 0xd7, 0xdc, 0x7f, 0x49, 0x2e, 0xd9, 0x92, - 0x9a, 0xad, 0x31, 0xde, 0xcf, 0xd8, 0x92, 0xe2, 0x80, 0x64, 0x49, 0x4c, 0x4a, 0xc2, 0xa4, 0x8e, - 0x92, 0x9a, 0x30, 0x79, 0xc5, 0x78, 0x12, 0x56, 0xa0, 0xf8, 0x11, 0x6a, 0x4c, 0xc4, 0x88, 0xa6, - 0xca, 0x5b, 0x32, 0xf0, 0x1d, 0x62, 0xc4, 0x9b, 0x67, 0x4b, 0x8e, 0x4c, 0xc2, 0x0b, 0xae, 0xe5, - 0x45, 0x68, 0xb3, 0xb1, 0x8f, 0xd6, 0x38, 0x2d, 0x0e, 0x45, 0x2a, 0xa4, 0xb7, 0x6c, 0x98, 0xcf, - 0x7c, 0xff, 0x09, 0x6a, 0xce, 0x5d, 0xc1, 0x2d, 0xb4, 0x94, 0xc0, 0x85, 0xed, 0xaf, 0x34, 0x71, - 0x1b, 0xad, 0x14, 0x34, 0xcd, 0xc1, 0x73, 0x77, 0x9d, 0xbd, 0xb5, 0xb0, 0x72, 0x9e, 0xba, 0x8f, - 0x9d, 0xee, 0x36, 0xda, 0xea, 0x83, 0x3e, 0x06, 0xa9, 0x98, 0xe0, 0xb5, 0x7c, 0x6d, 0x84, 0xfb, - 0xa0, 0xdf, 0x2a, 0x90, 0xf3, 0xa2, 0x7e, 0x77, 0xd0, 0xf6, 0x42, 0xd8, 0x6a, 0x7b, 0x0b, 0x35, - 0x98, 0x52, 0x39, 0x48, 0x5b, 0xd1, 0x7a, 0xd8, 0x43, 0xab, 0x2a, 0x1f, 0x9e, 0x41, 0xa4, 0x4d, - 0xd9, 0xf5, 0xb0, 0x76, 0xcb, 0x1b, 0xb1, 0x14, 0x79, 0x56, 0x69, 0xb0, 0x1e, 0x5a, 0xaf, 0xa4, - 0x09, 0x13, 0xca, 0x52, 0xdb, 0x60, 0xe5, 0xe0, 0x7b, 0xe8, 0x86, 0x31, 0x8e, 0x41, 0xb2, 0x31, - 0x83, 0x91, 0xb7, 0x62, 0x9a, 0x58, 0x0c, 0x62, 0x82, 0xb0, 0x02, 0x59, 0xb0, 0x08, 0x9e, 0x45, - 0x91, 0xc8, 0xb9, 0x2e, 0x07, 0xea, 0x35, 0x0c, 0xd0, 0x35, 0x27, 0xfb, 0x5f, 0x5d, 0xd4, 0x2c, - 0xdb, 0x78, 0x53, 0x1d, 0xe1, 0x01, 0x5a, 0xb5, 0x4b, 0x84, 0xdb, 0xd5, 0x48, 0x16, 0x77, 0xca, - 0xc7, 0x57, 0x07, 0xd5, 0x6d, 0x7f, 0xfc, 0xf9, 0xfb, 0xb3, 0xbb, 0x89, 0x37, 0xcc, 0xa2, 0x17, - 0x3d, 0xf3, 0x10, 0xf0, 0x27, 0x07, 0xa1, 
0x4b, 0x51, 0xf1, 0xce, 0x0c, 0x6e, 0x51, 0x66, 0x7f, - 0xf0, 0xff, 0x9b, 0x65, 0x11, 0xbb, 0x3b, 0x86, 0xc8, 0x16, 0xbe, 0x59, 0x13, 0x29, 0x6c, 0xf1, - 0x13, 0xd4, 0x9c, 0x9b, 0x19, 0xf6, 0x66, 0x5c, 0xfe, 0x9a, 0xae, 0x7f, 0xfb, 0x9a, 0x13, 0xdb, - 0xa5, 0x67, 0xc0, 0x31, 0x6e, 0xd5, 0xe0, 0xb9, 0x02, 0x59, 0x66, 0x3f, 0x3f, 0xfc, 0x36, 0xed, - 0x38, 0x3f, 0xa6, 0x1d, 0xe7, 0xd7, 0xb4, 0xe3, 0xbc, 0x7b, 0xf8, 0xef, 0xcf, 0x7a, 0xee, 0xe7, - 0x31, 0x6c, 0x98, 0x57, 0x7c, 0xf0, 0x27, 0x00, 0x00, 0xff, 0xff, 0x1e, 0xf1, 0xc6, 0xc3, 0x59, - 0x04, 0x00, 0x00, + // 662 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x54, 0xc1, 0x4e, 0x14, 0x4b, + 0x14, 0x4d, 0xcf, 0xc0, 0x00, 0x77, 0x78, 0xbc, 0xa1, 0x98, 0x40, 0xbf, 0x7e, 0xef, 0x4d, 0xb0, + 0xe3, 0x02, 0x49, 0xec, 0x0e, 0x10, 0x0d, 0xb2, 0x53, 0x82, 0x84, 0x44, 0x5c, 0x8c, 0x91, 0x85, + 0x21, 0x31, 0x45, 0xcf, 0x9d, 0xa6, 0x98, 0x9e, 0xaa, 0xb6, 0xaa, 0xba, 0x09, 0x5b, 0x13, 0x17, + 0xae, 0xfd, 0x08, 0x37, 0x7e, 0x88, 0x4b, 0x13, 0x7f, 0xc0, 0x10, 0x3f, 0xc4, 0x74, 0x75, 0xf5, + 0x30, 0x23, 0x63, 0x62, 0xe2, 0xa6, 0x73, 0xef, 0xad, 0x5b, 0xe7, 0x9c, 0x3a, 0x75, 0xbb, 0xe0, + 0xff, 0x74, 0x10, 0x87, 0x34, 0x65, 0x51, 0xc2, 0x90, 0xeb, 0x90, 0xf1, 0xbe, 0x30, 0x9f, 0x20, + 0x95, 0x42, 0x0b, 0x32, 0x53, 0xc4, 0xde, 0x7f, 0xb1, 0x10, 0x71, 0x82, 0x45, 0x5f, 0x48, 0x39, + 0x17, 0x9a, 0x6a, 0x26, 0xb8, 0x2a, 0x7b, 0xbc, 0xe3, 0x98, 0xe9, 0xf3, 0xec, 0x2c, 0x88, 0xc4, + 0x30, 0xa4, 0x32, 0x16, 0xa9, 0x14, 0x17, 0x26, 0xb8, 0x7f, 0x29, 0xe4, 0xa0, 0x9f, 0x88, 0x4b, + 0x15, 0x5a, 0x16, 0x15, 0x56, 0xa5, 0x30, 0xdf, 0xa2, 0x49, 0x7a, 0x4e, 0xb7, 0xc2, 0x18, 0x39, + 0x4a, 0xaa, 0xb1, 0x57, 0xc2, 0xf9, 0x2d, 0x58, 0x3a, 0x44, 0x7d, 0xc4, 0xfb, 0xa2, 0x8b, 0x6f, + 0x32, 0x54, 0xda, 0xff, 0x58, 0x83, 0xc5, 0x32, 0x57, 0xa9, 0xe0, 0x0a, 0xc9, 0x26, 0xb4, 0x86, + 0x94, 0xd3, 0x18, 0x7b, 0xcf, 0xe9, 0x10, 0x55, 0x4a, 0x23, 0x74, 0x9d, 0x75, 0x67, 0x63, 0xa1, + 0x7b, 0xab, 
0x4e, 0x4e, 0x61, 0x36, 0x61, 0x7c, 0xa0, 0xdc, 0xda, 0x7a, 0x7d, 0xa3, 0xb9, 0xfd, + 0x34, 0xb8, 0x51, 0x1b, 0x54, 0x6a, 0x4d, 0xf0, 0x7a, 0xa4, 0x36, 0xc8, 0x77, 0x82, 0x74, 0x10, + 0x07, 0x85, 0xe0, 0xa0, 0xaa, 0x06, 0x95, 0xe0, 0xe0, 0x19, 0xe3, 0x83, 0x6e, 0x09, 0x4a, 0x1e, + 0x42, 0x63, 0x28, 0x7a, 0x34, 0x51, 0x6e, 0xdd, 0xc0, 0x77, 0x02, 0x63, 0xde, 0xb8, 0xda, 0xe0, + 0xd8, 0x34, 0x1c, 0x70, 0x2d, 0xaf, 0xba, 0xb6, 0x9b, 0x78, 0x30, 0xcf, 0x69, 0xbe, 0x2f, 0x12, + 0x21, 0xdd, 0x19, 0xa3, 0x7c, 0x94, 0x7b, 0x8f, 0xa0, 0x39, 0xb6, 0x85, 0xb4, 0xa0, 0x3e, 0xc0, + 0x2b, 0x7b, 0xbe, 0x22, 0x24, 0x6d, 0x98, 0xcd, 0x69, 0x92, 0xa1, 0x5b, 0x5b, 0x77, 0x36, 0xe6, + 0xbb, 0x65, 0xb2, 0x57, 0xdb, 0x75, 0xfc, 0x15, 0x58, 0x3e, 0x44, 0x7d, 0x82, 0x52, 0x31, 0xc1, + 0x2b, 0xfb, 0xda, 0x40, 0x0e, 0x51, 0xbf, 0x54, 0x28, 0xc7, 0x4d, 0x7d, 0x57, 0x83, 0x95, 0x89, + 0xb2, 0xf5, 0x76, 0x15, 0x1a, 0x4c, 0xa9, 0x0c, 0xa5, 0x65, 0xb4, 0x19, 0x71, 0x61, 0x4e, 0x65, + 0x67, 0x17, 0x18, 0x69, 0x43, 0xbb, 0xd0, 0xad, 0xd2, 0x62, 0x47, 0x2c, 0x45, 0x96, 0x96, 0x1e, + 0x2c, 0x74, 0x6d, 0x56, 0xc8, 0xc4, 0x21, 0x65, 0x89, 0x3d, 0x60, 0x99, 0x90, 0xbb, 0xf0, 0x97, + 0x09, 0x4e, 0x50, 0xb2, 0x3e, 0xc3, 0x9e, 0x3b, 0x6b, 0x0e, 0x31, 0x59, 0x24, 0x01, 0x10, 0x85, + 0x32, 0x67, 0x11, 0x3e, 0x8e, 0x22, 0x91, 0x71, 0x5d, 0x5c, 0xa8, 0xdb, 0x30, 0x40, 0x53, 0x56, + 0xc8, 0x2e, 0xac, 0xdd, 0xae, 0x96, 0x83, 0x31, 0x67, 0x36, 0xfd, 0x6a, 0xd9, 0xbf, 0x07, 0x2b, + 0xfb, 0x22, 0x49, 0x30, 0xd2, 0x07, 0x39, 0x72, 0x6d, 0xed, 0x21, 0x04, 0x66, 0x78, 0x41, 0x59, + 0x9a, 0x60, 0x62, 0x7f, 0x15, 0xda, 0x93, 0xad, 0xa5, 0x65, 0xdb, 0x9f, 0xea, 0xd0, 0x2c, 0x3c, + 0x7c, 0x51, 0x52, 0x90, 0x23, 0x98, 0xb3, 0x13, 0x4c, 0xda, 0xe5, 0x3c, 0x4c, 0x0e, 0xb4, 0x47, + 0x6e, 0x4f, 0x89, 0xdf, 0x7e, 0xfb, 0xf5, 0xfb, 0x87, 0xda, 0x12, 0x59, 0x34, 0x7f, 0x59, 0xbe, + 0x65, 0xfe, 0x42, 0xf2, 0xde, 0x01, 0xb8, 0xb9, 0x51, 0xb2, 0x36, 0x82, 0x9b, 0xbc, 0x63, 0xef, + 0xe8, 0xcf, 0xc7, 0xda, 0x22, 0xfa, 0x6b, 0x46, 
0xc8, 0x32, 0xf9, 0xbb, 0x12, 0x92, 0x5b, 0xf2, + 0x53, 0x68, 0x8e, 0x0d, 0x0c, 0x71, 0x47, 0x5a, 0x7e, 0x1a, 0x2d, 0xef, 0x9f, 0x29, 0x2b, 0xf6, + 0x94, 0xae, 0x01, 0x27, 0xa4, 0x55, 0x81, 0x67, 0x0a, 0xa5, 0x39, 0xe9, 0x39, 0x2c, 0x8e, 0x9b, + 0x4b, 0x2c, 0xc8, 0x94, 0xbb, 0xf1, 0xbc, 0x69, 0x4b, 0x96, 0xe0, 0x8e, 0x21, 0xf8, 0xd7, 0x5f, + 0xad, 0x08, 0xb4, 0xa4, 0xd1, 0x80, 0xf1, 0x38, 0xc4, 0xa2, 0x6f, 0xcf, 0xd9, 0x7c, 0xb2, 0xff, + 0xf9, 0xba, 0xe3, 0x7c, 0xb9, 0xee, 0x38, 0xdf, 0xae, 0x3b, 0xce, 0xab, 0x07, 0xbf, 0xff, 0x7a, + 0x8d, 0xbd, 0x91, 0x67, 0x0d, 0xf3, 0x58, 0xed, 0xfc, 0x08, 0x00, 0x00, 0xff, 0xff, 0x93, 0x7e, + 0xb8, 0xbe, 0x40, 0x05, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -371,6 +472,7 @@ type InfoServiceClient interface { GetInfo(ctx context.Context, in *GetInfoRequest, opts ...grpc.CallOption) (*InfoResponse, error) GetVersion(ctx context.Context, in *GetVersionRequest, opts ...grpc.CallOption) (*v1alpha1.Version, error) GetUserInfo(ctx context.Context, in *GetUserInfoRequest, opts ...grpc.CallOption) (*GetUserInfoResponse, error) + CollectEvent(ctx context.Context, in *CollectEventRequest, opts ...grpc.CallOption) (*CollectEventResponse, error) } type infoServiceClient struct { @@ -408,11 +510,21 @@ func (c *infoServiceClient) GetUserInfo(ctx context.Context, in *GetUserInfoRequ return out, nil } +func (c *infoServiceClient) CollectEvent(ctx context.Context, in *CollectEventRequest, opts ...grpc.CallOption) (*CollectEventResponse, error) { + out := new(CollectEventResponse) + err := c.cc.Invoke(ctx, "/info.InfoService/CollectEvent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + // InfoServiceServer is the server API for InfoService service. 
type InfoServiceServer interface { GetInfo(context.Context, *GetInfoRequest) (*InfoResponse, error) GetVersion(context.Context, *GetVersionRequest) (*v1alpha1.Version, error) GetUserInfo(context.Context, *GetUserInfoRequest) (*GetUserInfoResponse, error) + CollectEvent(context.Context, *CollectEventRequest) (*CollectEventResponse, error) } // UnimplementedInfoServiceServer can be embedded to have forward compatible implementations. @@ -428,6 +540,9 @@ func (*UnimplementedInfoServiceServer) GetVersion(ctx context.Context, req *GetV func (*UnimplementedInfoServiceServer) GetUserInfo(ctx context.Context, req *GetUserInfoRequest) (*GetUserInfoResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method GetUserInfo not implemented") } +func (*UnimplementedInfoServiceServer) CollectEvent(ctx context.Context, req *CollectEventRequest) (*CollectEventResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method CollectEvent not implemented") +} func RegisterInfoServiceServer(s *grpc.Server, srv InfoServiceServer) { s.RegisterService(&_InfoService_serviceDesc, srv) @@ -487,6 +602,24 @@ func _InfoService_GetUserInfo_Handler(srv interface{}, ctx context.Context, dec return interceptor(ctx, in, info, handler) } +func _InfoService_CollectEvent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CollectEventRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InfoServiceServer).CollectEvent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/info.InfoService/CollectEvent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InfoServiceServer).CollectEvent(ctx, req.(*CollectEventRequest)) + } + return interceptor(ctx, in, info, handler) +} + var _InfoService_serviceDesc = grpc.ServiceDesc{ ServiceName: "info.InfoService", HandlerType: 
(*InfoServiceServer)(nil), @@ -503,6 +636,10 @@ var _InfoService_serviceDesc = grpc.ServiceDesc{ MethodName: "GetUserInfo", Handler: _InfoService_GetUserInfo_Handler, }, + { + MethodName: "CollectEvent", + Handler: _InfoService_CollectEvent_Handler, + }, }, Streams: []grpc.StreamDesc{}, Metadata: "pkg/apiclient/info/info.proto", @@ -690,6 +827,13 @@ func (m *GetUserInfoResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if len(m.ServiceAccountNamespace) > 0 { + i -= len(m.ServiceAccountNamespace) + copy(dAtA[i:], m.ServiceAccountNamespace) + i = encodeVarintInfo(dAtA, i, uint64(len(m.ServiceAccountNamespace))) + i-- + dAtA[i] = 0x3a + } if len(m.ServiceAccountName) > 0 { i -= len(m.ServiceAccountName) copy(dAtA[i:], m.ServiceAccountName) @@ -740,6 +884,67 @@ func (m *GetUserInfoResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *CollectEventRequest) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *CollectEventRequest) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *CollectEventRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.XXX_unrecognized != nil { + i -= len(m.XXX_unrecognized) + copy(dAtA[i:], m.XXX_unrecognized) + } + if len(m.Name) > 0 { + i -= len(m.Name) + copy(dAtA[i:], m.Name) + i = encodeVarintInfo(dAtA, i, uint64(len(m.Name))) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m *CollectEventResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *CollectEventResponse) 
MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *CollectEventResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.XXX_unrecognized != nil { + i -= len(m.XXX_unrecognized) + copy(dAtA[i:], m.XXX_unrecognized) + } + return len(dAtA) - i, nil +} + func encodeVarintInfo(dAtA []byte, offset int, v uint64) int { offset -= sovInfo(v) base := offset @@ -852,6 +1057,38 @@ func (m *GetUserInfoResponse) Size() (n int) { if l > 0 { n += 1 + l + sovInfo(uint64(l)) } + l = len(m.ServiceAccountNamespace) + if l > 0 { + n += 1 + l + sovInfo(uint64(l)) + } + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) + } + return n +} + +func (m *CollectEventRequest) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.Name) + if l > 0 { + n += 1 + l + sovInfo(uint64(l)) + } + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) + } + return n +} + +func (m *CollectEventResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -1490,6 +1727,172 @@ func (m *GetUserInfoResponse) Unmarshal(dAtA []byte) error { } m.ServiceAccountName = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex + case 7: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ServiceAccountNamespace", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowInfo + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthInfo + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthInfo + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ServiceAccountNamespace = 
string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipInfo(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthInfo + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *CollectEventRequest) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowInfo + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: CollectEventRequest: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: CollectEventRequest: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowInfo + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthInfo + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthInfo + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Name = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipInfo(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return 
ErrInvalidLengthInfo + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *CollectEventResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowInfo + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: CollectEventResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: CollectEventResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { default: iNdEx = preIndex skippy, err := skipInfo(dAtA[iNdEx:]) diff --git a/pkg/apiclient/info/info.pb.gw.go b/pkg/apiclient/info/info.pb.gw.go index 5b023ca0d673..245ce70c58b9 100644 --- a/pkg/apiclient/info/info.pb.gw.go +++ b/pkg/apiclient/info/info.pb.gw.go @@ -87,6 +87,40 @@ func local_request_InfoService_GetUserInfo_0(ctx context.Context, marshaler runt } +func request_InfoService_CollectEvent_0(ctx context.Context, marshaler runtime.Marshaler, client InfoServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq CollectEventRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.CollectEvent(ctx, &protoReq, 
grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_InfoService_CollectEvent_0(ctx context.Context, marshaler runtime.Marshaler, server InfoServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq CollectEventRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := server.CollectEvent(ctx, &protoReq) + return msg, metadata, err + +} + // RegisterInfoServiceHandlerServer registers the http handlers for service InfoService to "mux". // UnaryRPC :call InfoServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. 
@@ -162,6 +196,29 @@ func RegisterInfoServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux }) + mux.Handle("POST", pattern_InfoService_CollectEvent_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_InfoService_CollectEvent_0(rctx, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_InfoService_CollectEvent_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + return nil } @@ -263,6 +320,26 @@ func RegisterInfoServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux }) + mux.Handle("POST", pattern_InfoService_CollectEvent_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_InfoService_CollectEvent_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_InfoService_CollectEvent_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + return nil } @@ -272,6 +349,8 @@ var ( pattern_InfoService_GetVersion_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"api", "v1", "version"}, "", runtime.AssumeColonVerbOpt(true))) pattern_InfoService_GetUserInfo_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"api", "v1", "userinfo"}, "", runtime.AssumeColonVerbOpt(true))) + + pattern_InfoService_CollectEvent_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"api", "v1", "tracking", "event"}, "", runtime.AssumeColonVerbOpt(true))) ) var ( @@ -280,4 +359,6 @@ var ( forward_InfoService_GetVersion_0 = runtime.ForwardResponseMessage forward_InfoService_GetUserInfo_0 = runtime.ForwardResponseMessage + + forward_InfoService_CollectEvent_0 = runtime.ForwardResponseMessage ) diff --git a/pkg/apiclient/info/info.proto b/pkg/apiclient/info/info.proto index b98f2e00b2a3..118d71c8a960 100644 --- a/pkg/apiclient/info/info.proto +++ b/pkg/apiclient/info/info.proto @@ -10,11 +10,11 @@ message GetInfoRequest { } 
message InfoResponse { - string managedNamespace = 1; - repeated github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Link links = 2; - // which modals to show - map modals = 3; - string navColor = 4; + string managedNamespace = 1; + repeated github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Link links = 2; + // which modals to show + map modals = 3; + string navColor = 4; } message GetVersionRequest { @@ -24,22 +24,36 @@ message GetUserInfoRequest { } message GetUserInfoResponse { - string issuer = 1; - string subject = 2; - repeated string groups = 3; - string email = 4; - bool emailVerified = 5; - string serviceAccountName = 6; + string issuer = 1; + string subject = 2; + repeated string groups = 3; + string email = 4; + bool emailVerified = 5; + string serviceAccountName = 6; + string serviceAccountNamespace = 7; +} + +message CollectEventRequest { + string name = 1; +} + +message CollectEventResponse { } service InfoService { - rpc GetInfo (GetInfoRequest) returns (InfoResponse) { - option (google.api.http).get = "/api/v1/info"; - } - rpc GetVersion (GetVersionRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Version) { - option (google.api.http).get = "/api/v1/version"; - } - rpc GetUserInfo (GetUserInfoRequest) returns (GetUserInfoResponse) { - option (google.api.http).get = "/api/v1/userinfo"; - } + rpc GetInfo(GetInfoRequest) returns (InfoResponse) { + option (google.api.http).get = "/api/v1/info"; + } + rpc GetVersion(GetVersionRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Version) { + option (google.api.http).get = "/api/v1/version"; + } + rpc GetUserInfo(GetUserInfoRequest) returns (GetUserInfoResponse) { + option (google.api.http).get = "/api/v1/userinfo"; + } + rpc CollectEvent(CollectEventRequest) returns (CollectEventResponse) { + option (google.api.http) = { + post : "/api/v1/tracking/event" + body : "*" + }; + } } diff --git 
a/pkg/apiclient/offline-workflow-service-client.go b/pkg/apiclient/offline-workflow-service-client.go index fb2f109940cf..add75290ceaf 100644 --- a/pkg/apiclient/offline-workflow-service-client.go +++ b/pkg/apiclient/offline-workflow-service-client.go @@ -82,7 +82,7 @@ func (o offlineClusterWorkflowTemplateNamespacedGetter) Get(name string) (*wfv1. } func (o OfflineWorkflowServiceClient) LintWorkflow(_ context.Context, req *workflowpkg.WorkflowLintRequest, _ ...grpc.CallOption) (*wfv1.Workflow, error) { - _, err := validate.ValidateWorkflow(&offlineWorkflowTemplateNamespacedGetter{}, &offlineClusterWorkflowTemplateNamespacedGetter{}, req.Workflow, validate.ValidateOpts{Lint: true}) + err := validate.ValidateWorkflow(&offlineWorkflowTemplateNamespacedGetter{}, &offlineClusterWorkflowTemplateNamespacedGetter{}, req.Workflow, validate.ValidateOpts{Lint: true}) if err != nil { return nil, err } diff --git a/pkg/apiclient/pipeline/forwarder_overwrite.go b/pkg/apiclient/pipeline/forwarder_overwrite.go deleted file mode 100644 index 9dd891875d68..000000000000 --- a/pkg/apiclient/pipeline/forwarder_overwrite.go +++ /dev/null @@ -1,11 +0,0 @@ -package pipeline - -import ( - "github.com/argoproj/pkg/grpc/http" -) - -func init() { - forward_PipelineService_WatchPipelines_0 = http.StreamForwarder - forward_PipelineService_PipelineLogs_0 = http.StreamForwarder - forward_PipelineService_WatchSteps_0 = http.StreamForwarder -} diff --git a/pkg/apiclient/pipeline/pipeline.pb.go b/pkg/apiclient/pipeline/pipeline.pb.go deleted file mode 100644 index e1763843a883..000000000000 --- a/pkg/apiclient/pipeline/pipeline.pb.go +++ /dev/null @@ -1,3408 +0,0 @@ -// Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: pkg/apiclient/pipeline/pipeline.proto - -package pipeline - -import ( - context "context" - fmt "fmt" - v1alpha1 "github.com/argoproj-labs/argo-dataflow/api/v1alpha1" - proto "github.com/gogo/protobuf/proto" - _ "google.golang.org/genproto/googleapis/api/annotations" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" - io "io" - v11 "k8s.io/api/core/v1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" - math "math" - math_bits "math/bits" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package - -type ListPipelinesRequest struct { - Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"` - ListOptions *v1.ListOptions `protobuf:"bytes,2,opt,name=listOptions,proto3" json:"listOptions,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *ListPipelinesRequest) Reset() { *m = ListPipelinesRequest{} } -func (m *ListPipelinesRequest) String() string { return proto.CompactTextString(m) } -func (*ListPipelinesRequest) ProtoMessage() {} -func (*ListPipelinesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_71dfb1a81115c785, []int{0} -} -func (m *ListPipelinesRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *ListPipelinesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_ListPipelinesRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := 
m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *ListPipelinesRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_ListPipelinesRequest.Merge(m, src) -} -func (m *ListPipelinesRequest) XXX_Size() int { - return m.Size() -} -func (m *ListPipelinesRequest) XXX_DiscardUnknown() { - xxx_messageInfo_ListPipelinesRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_ListPipelinesRequest proto.InternalMessageInfo - -func (m *ListPipelinesRequest) GetNamespace() string { - if m != nil { - return m.Namespace - } - return "" -} - -func (m *ListPipelinesRequest) GetListOptions() *v1.ListOptions { - if m != nil { - return m.ListOptions - } - return nil -} - -type PipelineWatchEvent struct { - Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` - Object *v1alpha1.Pipeline `protobuf:"bytes,2,opt,name=object,proto3" json:"object,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *PipelineWatchEvent) Reset() { *m = PipelineWatchEvent{} } -func (m *PipelineWatchEvent) String() string { return proto.CompactTextString(m) } -func (*PipelineWatchEvent) ProtoMessage() {} -func (*PipelineWatchEvent) Descriptor() ([]byte, []int) { - return fileDescriptor_71dfb1a81115c785, []int{1} -} -func (m *PipelineWatchEvent) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *PipelineWatchEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_PipelineWatchEvent.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *PipelineWatchEvent) XXX_Merge(src proto.Message) { - xxx_messageInfo_PipelineWatchEvent.Merge(m, src) -} -func (m *PipelineWatchEvent) XXX_Size() int { - return m.Size() -} -func (m *PipelineWatchEvent) XXX_DiscardUnknown() { - 
xxx_messageInfo_PipelineWatchEvent.DiscardUnknown(m) -} - -var xxx_messageInfo_PipelineWatchEvent proto.InternalMessageInfo - -func (m *PipelineWatchEvent) GetType() string { - if m != nil { - return m.Type - } - return "" -} - -func (m *PipelineWatchEvent) GetObject() *v1alpha1.Pipeline { - if m != nil { - return m.Object - } - return nil -} - -type GetPipelineRequest struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"` - GetOptions *v1.GetOptions `protobuf:"bytes,3,opt,name=getOptions,proto3" json:"getOptions,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetPipelineRequest) Reset() { *m = GetPipelineRequest{} } -func (m *GetPipelineRequest) String() string { return proto.CompactTextString(m) } -func (*GetPipelineRequest) ProtoMessage() {} -func (*GetPipelineRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_71dfb1a81115c785, []int{2} -} -func (m *GetPipelineRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *GetPipelineRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_GetPipelineRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *GetPipelineRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetPipelineRequest.Merge(m, src) -} -func (m *GetPipelineRequest) XXX_Size() int { - return m.Size() -} -func (m *GetPipelineRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetPipelineRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetPipelineRequest proto.InternalMessageInfo - -func (m *GetPipelineRequest) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *GetPipelineRequest) 
GetNamespace() string { - if m != nil { - return m.Namespace - } - return "" -} - -func (m *GetPipelineRequest) GetGetOptions() *v1.GetOptions { - if m != nil { - return m.GetOptions - } - return nil -} - -type RestartPipelineRequest struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RestartPipelineRequest) Reset() { *m = RestartPipelineRequest{} } -func (m *RestartPipelineRequest) String() string { return proto.CompactTextString(m) } -func (*RestartPipelineRequest) ProtoMessage() {} -func (*RestartPipelineRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_71dfb1a81115c785, []int{3} -} -func (m *RestartPipelineRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *RestartPipelineRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_RestartPipelineRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *RestartPipelineRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_RestartPipelineRequest.Merge(m, src) -} -func (m *RestartPipelineRequest) XXX_Size() int { - return m.Size() -} -func (m *RestartPipelineRequest) XXX_DiscardUnknown() { - xxx_messageInfo_RestartPipelineRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_RestartPipelineRequest proto.InternalMessageInfo - -func (m *RestartPipelineRequest) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *RestartPipelineRequest) GetNamespace() string { - if m != nil { - return m.Namespace - } - return "" -} - -type RestartPipelineResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte 
`json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RestartPipelineResponse) Reset() { *m = RestartPipelineResponse{} } -func (m *RestartPipelineResponse) String() string { return proto.CompactTextString(m) } -func (*RestartPipelineResponse) ProtoMessage() {} -func (*RestartPipelineResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_71dfb1a81115c785, []int{4} -} -func (m *RestartPipelineResponse) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *RestartPipelineResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_RestartPipelineResponse.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *RestartPipelineResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_RestartPipelineResponse.Merge(m, src) -} -func (m *RestartPipelineResponse) XXX_Size() int { - return m.Size() -} -func (m *RestartPipelineResponse) XXX_DiscardUnknown() { - xxx_messageInfo_RestartPipelineResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_RestartPipelineResponse proto.InternalMessageInfo - -type DeletePipelineRequest struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"` - DeleteOptions *v1.DeleteOptions `protobuf:"bytes,3,opt,name=deleteOptions,proto3" json:"deleteOptions,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *DeletePipelineRequest) Reset() { *m = DeletePipelineRequest{} } -func (m *DeletePipelineRequest) String() string { return proto.CompactTextString(m) } -func (*DeletePipelineRequest) ProtoMessage() {} -func (*DeletePipelineRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_71dfb1a81115c785, []int{5} -} -func (m 
*DeletePipelineRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *DeletePipelineRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_DeletePipelineRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *DeletePipelineRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_DeletePipelineRequest.Merge(m, src) -} -func (m *DeletePipelineRequest) XXX_Size() int { - return m.Size() -} -func (m *DeletePipelineRequest) XXX_DiscardUnknown() { - xxx_messageInfo_DeletePipelineRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_DeletePipelineRequest proto.InternalMessageInfo - -func (m *DeletePipelineRequest) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *DeletePipelineRequest) GetNamespace() string { - if m != nil { - return m.Namespace - } - return "" -} - -func (m *DeletePipelineRequest) GetDeleteOptions() *v1.DeleteOptions { - if m != nil { - return m.DeleteOptions - } - return nil -} - -type DeletePipelineResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *DeletePipelineResponse) Reset() { *m = DeletePipelineResponse{} } -func (m *DeletePipelineResponse) String() string { return proto.CompactTextString(m) } -func (*DeletePipelineResponse) ProtoMessage() {} -func (*DeletePipelineResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_71dfb1a81115c785, []int{6} -} -func (m *DeletePipelineResponse) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *DeletePipelineResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_DeletePipelineResponse.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { 
- return nil, err - } - return b[:n], nil - } -} -func (m *DeletePipelineResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_DeletePipelineResponse.Merge(m, src) -} -func (m *DeletePipelineResponse) XXX_Size() int { - return m.Size() -} -func (m *DeletePipelineResponse) XXX_DiscardUnknown() { - xxx_messageInfo_DeletePipelineResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_DeletePipelineResponse proto.InternalMessageInfo - -type WatchStepRequest struct { - Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"` - ListOptions *v1.ListOptions `protobuf:"bytes,2,opt,name=listOptions,proto3" json:"listOptions,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *WatchStepRequest) Reset() { *m = WatchStepRequest{} } -func (m *WatchStepRequest) String() string { return proto.CompactTextString(m) } -func (*WatchStepRequest) ProtoMessage() {} -func (*WatchStepRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_71dfb1a81115c785, []int{7} -} -func (m *WatchStepRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *WatchStepRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_WatchStepRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *WatchStepRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_WatchStepRequest.Merge(m, src) -} -func (m *WatchStepRequest) XXX_Size() int { - return m.Size() -} -func (m *WatchStepRequest) XXX_DiscardUnknown() { - xxx_messageInfo_WatchStepRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_WatchStepRequest proto.InternalMessageInfo - -func (m *WatchStepRequest) GetNamespace() string { - if m != nil { - return m.Namespace - } - return "" -} - -func (m *WatchStepRequest) GetListOptions() 
*v1.ListOptions { - if m != nil { - return m.ListOptions - } - return nil -} - -type StepWatchEvent struct { - Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` - Object *v1alpha1.Step `protobuf:"bytes,2,opt,name=object,proto3" json:"object,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *StepWatchEvent) Reset() { *m = StepWatchEvent{} } -func (m *StepWatchEvent) String() string { return proto.CompactTextString(m) } -func (*StepWatchEvent) ProtoMessage() {} -func (*StepWatchEvent) Descriptor() ([]byte, []int) { - return fileDescriptor_71dfb1a81115c785, []int{8} -} -func (m *StepWatchEvent) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *StepWatchEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_StepWatchEvent.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *StepWatchEvent) XXX_Merge(src proto.Message) { - xxx_messageInfo_StepWatchEvent.Merge(m, src) -} -func (m *StepWatchEvent) XXX_Size() int { - return m.Size() -} -func (m *StepWatchEvent) XXX_DiscardUnknown() { - xxx_messageInfo_StepWatchEvent.DiscardUnknown(m) -} - -var xxx_messageInfo_StepWatchEvent proto.InternalMessageInfo - -func (m *StepWatchEvent) GetType() string { - if m != nil { - return m.Type - } - return "" -} - -func (m *StepWatchEvent) GetObject() *v1alpha1.Step { - if m != nil { - return m.Object - } - return nil -} - -type PipelineLogsRequest struct { - Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"` - // optional - only return entries for this pipeline - Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` - // optional - only return entries for this step - StepName string 
`protobuf:"bytes,3,opt,name=stepName,proto3" json:"stepName,omitempty"` - // optional - only return entries which match this expresssion - Grep string `protobuf:"bytes,4,opt,name=grep,proto3" json:"grep,omitempty"` - PodLogOptions *v11.PodLogOptions `protobuf:"bytes,5,opt,name=podLogOptions,proto3" json:"podLogOptions,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *PipelineLogsRequest) Reset() { *m = PipelineLogsRequest{} } -func (m *PipelineLogsRequest) String() string { return proto.CompactTextString(m) } -func (*PipelineLogsRequest) ProtoMessage() {} -func (*PipelineLogsRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_71dfb1a81115c785, []int{9} -} -func (m *PipelineLogsRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *PipelineLogsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_PipelineLogsRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *PipelineLogsRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_PipelineLogsRequest.Merge(m, src) -} -func (m *PipelineLogsRequest) XXX_Size() int { - return m.Size() -} -func (m *PipelineLogsRequest) XXX_DiscardUnknown() { - xxx_messageInfo_PipelineLogsRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_PipelineLogsRequest proto.InternalMessageInfo - -func (m *PipelineLogsRequest) GetNamespace() string { - if m != nil { - return m.Namespace - } - return "" -} - -func (m *PipelineLogsRequest) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *PipelineLogsRequest) GetStepName() string { - if m != nil { - return m.StepName - } - return "" -} - -func (m *PipelineLogsRequest) GetGrep() string { - if m != nil { - return m.Grep - } - return "" -} - -func (m 
*PipelineLogsRequest) GetPodLogOptions() *v11.PodLogOptions { - if m != nil { - return m.PodLogOptions - } - return nil -} - -// structured log entry -type LogEntry struct { - Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"` - PipelineName string `protobuf:"bytes,2,opt,name=pipelineName,proto3" json:"pipelineName,omitempty"` - StepName string `protobuf:"bytes,3,opt,name=stepName,proto3" json:"stepName,omitempty"` - Time *v1.Time `protobuf:"bytes,6,opt,name=time,proto3" json:"time,omitempty"` - Msg string `protobuf:"bytes,7,opt,name=msg,proto3" json:"msg,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *LogEntry) Reset() { *m = LogEntry{} } -func (m *LogEntry) String() string { return proto.CompactTextString(m) } -func (*LogEntry) ProtoMessage() {} -func (*LogEntry) Descriptor() ([]byte, []int) { - return fileDescriptor_71dfb1a81115c785, []int{10} -} -func (m *LogEntry) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *LogEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_LogEntry.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *LogEntry) XXX_Merge(src proto.Message) { - xxx_messageInfo_LogEntry.Merge(m, src) -} -func (m *LogEntry) XXX_Size() int { - return m.Size() -} -func (m *LogEntry) XXX_DiscardUnknown() { - xxx_messageInfo_LogEntry.DiscardUnknown(m) -} - -var xxx_messageInfo_LogEntry proto.InternalMessageInfo - -func (m *LogEntry) GetNamespace() string { - if m != nil { - return m.Namespace - } - return "" -} - -func (m *LogEntry) GetPipelineName() string { - if m != nil { - return m.PipelineName - } - return "" -} - -func (m *LogEntry) GetStepName() string { - if m != nil { - return m.StepName - } - return "" -} - -func (m 
*LogEntry) GetTime() *v1.Time { - if m != nil { - return m.Time - } - return nil -} - -func (m *LogEntry) GetMsg() string { - if m != nil { - return m.Msg - } - return "" -} - -func init() { - proto.RegisterType((*ListPipelinesRequest)(nil), "pipeline.ListPipelinesRequest") - proto.RegisterType((*PipelineWatchEvent)(nil), "pipeline.PipelineWatchEvent") - proto.RegisterType((*GetPipelineRequest)(nil), "pipeline.GetPipelineRequest") - proto.RegisterType((*RestartPipelineRequest)(nil), "pipeline.RestartPipelineRequest") - proto.RegisterType((*RestartPipelineResponse)(nil), "pipeline.RestartPipelineResponse") - proto.RegisterType((*DeletePipelineRequest)(nil), "pipeline.DeletePipelineRequest") - proto.RegisterType((*DeletePipelineResponse)(nil), "pipeline.DeletePipelineResponse") - proto.RegisterType((*WatchStepRequest)(nil), "pipeline.WatchStepRequest") - proto.RegisterType((*StepWatchEvent)(nil), "pipeline.StepWatchEvent") - proto.RegisterType((*PipelineLogsRequest)(nil), "pipeline.PipelineLogsRequest") - proto.RegisterType((*LogEntry)(nil), "pipeline.LogEntry") -} - -func init() { - proto.RegisterFile("pkg/apiclient/pipeline/pipeline.proto", fileDescriptor_71dfb1a81115c785) -} - -var fileDescriptor_71dfb1a81115c785 = []byte{ - // 864 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x56, 0xcf, 0x6f, 0xdc, 0x44, - 0x14, 0xd6, 0x24, 0x21, 0x4d, 0x5f, 0x9a, 0xb4, 0x1a, 0xa0, 0x2c, 0x56, 0x9a, 0x6e, 0x47, 0x14, - 0x85, 0xd2, 0x8e, 0xb3, 0x6d, 0x0f, 0xf4, 0x00, 0x12, 0xd0, 0x2a, 0x12, 0x5a, 0x95, 0xe0, 0x80, - 0x10, 0x5c, 0xaa, 0x89, 0x33, 0x78, 0xdd, 0xb5, 0x3d, 0x83, 0x67, 0xba, 0x51, 0x84, 0x0a, 0x02, - 0x89, 0x03, 0x12, 0x37, 0x2e, 0xe5, 0xce, 0x5f, 0xc1, 0x09, 0x89, 0x0b, 0x47, 0x24, 0xfe, 0x01, - 0x14, 0xf1, 0x87, 0xa0, 0x99, 0xf5, 0xf8, 0xc7, 0xae, 0x69, 0x2d, 0xf6, 0xd0, 0xdb, 0xcc, 0xf8, - 0xbd, 0xef, 0x7d, 0xef, 0xf3, 0xf3, 0xe7, 0x81, 0xab, 0x72, 0x1c, 0xf9, 0x4c, 0xc6, 0x61, 0x12, - 0xf3, 0x4c, 0xfb, 
0x32, 0x96, 0x3c, 0x89, 0x33, 0x5e, 0x2e, 0xa8, 0xcc, 0x85, 0x16, 0x78, 0xcd, - 0xed, 0xbd, 0xad, 0x48, 0x88, 0x28, 0xe1, 0x26, 0xc7, 0x67, 0x59, 0x26, 0x34, 0xd3, 0xb1, 0xc8, - 0xd4, 0x34, 0xce, 0xbb, 0x3d, 0x7e, 0x4b, 0xd1, 0x58, 0x98, 0xa7, 0x29, 0x0b, 0x47, 0x71, 0xc6, - 0xf3, 0x13, 0xbf, 0x28, 0xa1, 0xfc, 0x94, 0x6b, 0xe6, 0x4f, 0x06, 0x7e, 0xc4, 0x33, 0x9e, 0x33, - 0xcd, 0x8f, 0x8a, 0x2c, 0x52, 0x65, 0xf9, 0xa1, 0xc8, 0x79, 0x5b, 0xcc, 0xfb, 0x51, 0xac, 0x47, - 0x8f, 0x0e, 0x69, 0x28, 0x52, 0x9f, 0xe5, 0x91, 0x90, 0xb9, 0x78, 0x78, 0x23, 0x61, 0x87, 0xca, - 0xee, 0x6e, 0x1c, 0x31, 0xcd, 0xbe, 0x48, 0xc4, 0xb1, 0xc5, 0x98, 0x0c, 0x58, 0x22, 0x47, 0x6c, - 0x0e, 0x84, 0xfc, 0x80, 0xe0, 0xa5, 0x61, 0xac, 0xf4, 0x7e, 0xd1, 0x8d, 0x0a, 0xf8, 0x97, 0x8f, - 0xb8, 0xd2, 0x78, 0x0b, 0xce, 0x66, 0x2c, 0xe5, 0x4a, 0xb2, 0x90, 0xf7, 0x50, 0x1f, 0xed, 0x9c, - 0x0d, 0xaa, 0x03, 0x7c, 0x00, 0xeb, 0x49, 0xac, 0xf4, 0x87, 0xd2, 0xb6, 0xda, 0x5b, 0xea, 0xa3, - 0x9d, 0xf5, 0x9b, 0x03, 0x3a, 0x65, 0x4d, 0xeb, 0xbd, 0x52, 0x39, 0x8e, 0xcc, 0x81, 0xa2, 0xa6, - 0x57, 0x3a, 0x19, 0xd0, 0x61, 0x95, 0x18, 0xd4, 0x51, 0xc8, 0x37, 0x80, 0x1d, 0x8d, 0x4f, 0x99, - 0x0e, 0x47, 0xf7, 0x26, 0x3c, 0xd3, 0x18, 0xc3, 0x8a, 0x3e, 0x91, 0x8e, 0x83, 0x5d, 0xe3, 0x4f, - 0x60, 0x55, 0x1c, 0x3e, 0xe4, 0xa1, 0x2e, 0x2a, 0xbf, 0x4d, 0x2b, 0x2d, 0xa8, 0xd3, 0xe2, 0x81, - 0xd1, 0xc2, 0xee, 0x1e, 0x38, 0x2d, 0x0c, 0x11, 0xea, 0xb4, 0xa0, 0xae, 0x58, 0x50, 0x80, 0x91, - 0x27, 0x08, 0xf0, 0x1e, 0x2f, 0xb5, 0x70, 0x52, 0x60, 0x58, 0x31, 0x9d, 0x3b, 0x06, 0x66, 0xdd, - 0x94, 0x67, 0x69, 0x56, 0x9e, 0x7d, 0x80, 0x88, 0x97, 0xea, 0x2c, 0x5b, 0x8e, 0xbb, 0xdd, 0xd4, - 0xd9, 0x2b, 0xf3, 0x82, 0x1a, 0x06, 0xf9, 0x00, 0x2e, 0x06, 0x5c, 0x69, 0x96, 0x2f, 0xce, 0x8e, - 0xbc, 0x0a, 0xaf, 0xcc, 0x61, 0x29, 0x29, 0x32, 0xc5, 0xc9, 0x2f, 0x08, 0x5e, 0xbe, 0xcb, 0x13, - 0xae, 0xf9, 0xe2, 0x22, 0x7c, 0x06, 0x1b, 0x47, 0x16, 0xaa, 0xa9, 0xc3, 0xad, 0x6e, 0x3a, 0xdc, - 0xad, 0xa7, 0x06, 0x4d, 0x24, 0xd2, 0x83, 0x8b, 0xb3, 
0x2c, 0x8b, 0x06, 0xbe, 0x47, 0x70, 0xc1, - 0x0e, 0xcf, 0x81, 0xe6, 0xf2, 0x39, 0xce, 0xf2, 0x31, 0x6c, 0x1a, 0x06, 0xcf, 0x98, 0xe3, 0x8f, - 0x66, 0xe6, 0xf8, 0xce, 0xff, 0x9a, 0x63, 0xdb, 0xaa, 0x9b, 0xe1, 0xdf, 0x10, 0xbc, 0xe8, 0x54, - 0x19, 0x8a, 0xa8, 0xe3, 0xf7, 0xec, 0xde, 0xee, 0x52, 0xed, 0xed, 0x7a, 0xb0, 0xa6, 0x34, 0x97, - 0xf7, 0xcd, 0xf9, 0xb2, 0x3d, 0x2f, 0xf7, 0x26, 0x3e, 0xca, 0xb9, 0xec, 0xad, 0x4c, 0xe3, 0xcd, - 0x1a, 0xef, 0xc1, 0x86, 0x14, 0x47, 0x43, 0x11, 0x39, 0x25, 0x5f, 0xb0, 0x3d, 0x5d, 0xa9, 0x29, - 0x49, 0x8d, 0x97, 0x19, 0xdd, 0xf6, 0xeb, 0x81, 0x41, 0x33, 0x8f, 0xfc, 0x8a, 0x60, 0x6d, 0x28, - 0xa2, 0x7b, 0x99, 0xce, 0x4f, 0x9e, 0xc1, 0x9b, 0xc0, 0x39, 0xe7, 0xc3, 0xf7, 0x2b, 0xfe, 0x8d, - 0xb3, 0xa7, 0xf6, 0xf1, 0x0e, 0xac, 0xe8, 0x38, 0xe5, 0xbd, 0x55, 0x4b, 0xf5, 0x5a, 0xb7, 0x97, - 0xfe, 0x71, 0x9c, 0xf2, 0xc0, 0xe6, 0xe1, 0x0b, 0xb0, 0x9c, 0xaa, 0xa8, 0x77, 0xc6, 0xc2, 0x9a, - 0xe5, 0xcd, 0xdf, 0xcf, 0xc0, 0x79, 0xa7, 0xff, 0x01, 0xcf, 0x27, 0x71, 0xc8, 0xf1, 0xcf, 0x08, - 0x36, 0x1a, 0x26, 0x8b, 0xb7, 0x69, 0xf9, 0x3b, 0x69, 0x73, 0x5f, 0xef, 0xdd, 0x85, 0x0c, 0xcd, - 0x40, 0x92, 0xab, 0xdf, 0xfd, 0xf5, 0xcf, 0x4f, 0x4b, 0x97, 0xf1, 0xa5, 0xe2, 0x07, 0x50, 0xfe, - 0xc0, 0x94, 0xff, 0x55, 0x29, 0xe0, 0x63, 0xfc, 0x35, 0x6c, 0xda, 0x21, 0xed, 0xce, 0x6d, 0xab, - 0x7a, 0x3e, 0x6f, 0xd7, 0xe4, 0xba, 0x2d, 0xfb, 0x3a, 0x7e, 0xcd, 0x95, 0x55, 0x3a, 0xe7, 0x2c, - 0x6d, 0xaf, 0xbe, 0x8b, 0xf0, 0x13, 0x04, 0xeb, 0x35, 0xcf, 0xc5, 0x35, 0xf4, 0x79, 0x2b, 0xf6, - 0x16, 0x33, 0xfa, 0x79, 0x72, 0xad, 0xac, 0xa6, 0xeb, 0xc7, 0xf8, 0x47, 0x04, 0xe7, 0x67, 0x8c, - 0x12, 0xf7, 0x2b, 0x7a, 0xed, 0x7e, 0xec, 0x5d, 0x79, 0x4a, 0x44, 0x61, 0x52, 0xb7, 0x2d, 0x0d, - 0x4a, 0xae, 0x77, 0xa1, 0xe1, 0xe7, 0x53, 0x14, 0xfc, 0x2d, 0x82, 0xcd, 0xa6, 0xeb, 0xe1, 0xcb, - 0x55, 0xad, 0x56, 0xd7, 0xf6, 0xfa, 0xff, 0x1d, 0x50, 0x70, 0x29, 0x24, 0xb9, 0xd6, 0x4d, 0x92, - 0x09, 0x9c, 0xab, 0x9b, 0x0b, 0xbe, 0x34, 0x3f, 0x0b, 0x35, 0xd3, 0xf1, 0x70, 0x6d, 0x94, 
0x8a, - 0x0f, 0x9a, 0x0c, 0x6c, 0xc1, 0x37, 0xf1, 0x1b, 0x5d, 0x06, 0xc4, 0x4f, 0x44, 0xa4, 0x76, 0x11, - 0xce, 0x00, 0x4a, 0x57, 0x57, 0xd8, 0xab, 0x60, 0x67, 0xbd, 0xde, 0xeb, 0x55, 0xcf, 0x9a, 0x06, - 0x4c, 0x76, 0x6c, 0x61, 0x82, 0xfb, 0x33, 0x85, 0x8d, 0x19, 0xcc, 0x4c, 0xe5, 0x7b, 0x7b, 0x7f, - 0x9c, 0x6e, 0xa3, 0x3f, 0x4f, 0xb7, 0xd1, 0xdf, 0xa7, 0xdb, 0xe8, 0xf3, 0x3b, 0x2d, 0x37, 0xad, - 0xe9, 0x25, 0xeb, 0x58, 0xe4, 0x63, 0x33, 0x6f, 0xca, 0x6f, 0xbf, 0x35, 0x1e, 0xae, 0xda, 0x6b, - 0xd6, 0xad, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xb8, 0xa0, 0x67, 0xda, 0x56, 0x0a, 0x00, 0x00, -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// PipelineServiceClient is the client API for PipelineService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
-type PipelineServiceClient interface { - ListPipelines(ctx context.Context, in *ListPipelinesRequest, opts ...grpc.CallOption) (*v1alpha1.PipelineList, error) - WatchPipelines(ctx context.Context, in *ListPipelinesRequest, opts ...grpc.CallOption) (PipelineService_WatchPipelinesClient, error) - GetPipeline(ctx context.Context, in *GetPipelineRequest, opts ...grpc.CallOption) (*v1alpha1.Pipeline, error) - RestartPipeline(ctx context.Context, in *RestartPipelineRequest, opts ...grpc.CallOption) (*RestartPipelineResponse, error) - DeletePipeline(ctx context.Context, in *DeletePipelineRequest, opts ...grpc.CallOption) (*DeletePipelineResponse, error) - PipelineLogs(ctx context.Context, in *PipelineLogsRequest, opts ...grpc.CallOption) (PipelineService_PipelineLogsClient, error) - WatchSteps(ctx context.Context, in *WatchStepRequest, opts ...grpc.CallOption) (PipelineService_WatchStepsClient, error) -} - -type pipelineServiceClient struct { - cc *grpc.ClientConn -} - -func NewPipelineServiceClient(cc *grpc.ClientConn) PipelineServiceClient { - return &pipelineServiceClient{cc} -} - -func (c *pipelineServiceClient) ListPipelines(ctx context.Context, in *ListPipelinesRequest, opts ...grpc.CallOption) (*v1alpha1.PipelineList, error) { - out := new(v1alpha1.PipelineList) - err := c.cc.Invoke(ctx, "/pipeline.PipelineService/ListPipelines", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) WatchPipelines(ctx context.Context, in *ListPipelinesRequest, opts ...grpc.CallOption) (PipelineService_WatchPipelinesClient, error) { - stream, err := c.cc.NewStream(ctx, &_PipelineService_serviceDesc.Streams[0], "/pipeline.PipelineService/WatchPipelines", opts...) 
- if err != nil { - return nil, err - } - x := &pipelineServiceWatchPipelinesClient{stream} - if err := x.ClientStream.SendMsg(in); err != nil { - return nil, err - } - if err := x.ClientStream.CloseSend(); err != nil { - return nil, err - } - return x, nil -} - -type PipelineService_WatchPipelinesClient interface { - Recv() (*PipelineWatchEvent, error) - grpc.ClientStream -} - -type pipelineServiceWatchPipelinesClient struct { - grpc.ClientStream -} - -func (x *pipelineServiceWatchPipelinesClient) Recv() (*PipelineWatchEvent, error) { - m := new(PipelineWatchEvent) - if err := x.ClientStream.RecvMsg(m); err != nil { - return nil, err - } - return m, nil -} - -func (c *pipelineServiceClient) GetPipeline(ctx context.Context, in *GetPipelineRequest, opts ...grpc.CallOption) (*v1alpha1.Pipeline, error) { - out := new(v1alpha1.Pipeline) - err := c.cc.Invoke(ctx, "/pipeline.PipelineService/GetPipeline", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) RestartPipeline(ctx context.Context, in *RestartPipelineRequest, opts ...grpc.CallOption) (*RestartPipelineResponse, error) { - out := new(RestartPipelineResponse) - err := c.cc.Invoke(ctx, "/pipeline.PipelineService/RestartPipeline", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) DeletePipeline(ctx context.Context, in *DeletePipelineRequest, opts ...grpc.CallOption) (*DeletePipelineResponse, error) { - out := new(DeletePipelineResponse) - err := c.cc.Invoke(ctx, "/pipeline.PipelineService/DeletePipeline", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) PipelineLogs(ctx context.Context, in *PipelineLogsRequest, opts ...grpc.CallOption) (PipelineService_PipelineLogsClient, error) { - stream, err := c.cc.NewStream(ctx, &_PipelineService_serviceDesc.Streams[1], "/pipeline.PipelineService/PipelineLogs", opts...) 
- if err != nil { - return nil, err - } - x := &pipelineServicePipelineLogsClient{stream} - if err := x.ClientStream.SendMsg(in); err != nil { - return nil, err - } - if err := x.ClientStream.CloseSend(); err != nil { - return nil, err - } - return x, nil -} - -type PipelineService_PipelineLogsClient interface { - Recv() (*LogEntry, error) - grpc.ClientStream -} - -type pipelineServicePipelineLogsClient struct { - grpc.ClientStream -} - -func (x *pipelineServicePipelineLogsClient) Recv() (*LogEntry, error) { - m := new(LogEntry) - if err := x.ClientStream.RecvMsg(m); err != nil { - return nil, err - } - return m, nil -} - -func (c *pipelineServiceClient) WatchSteps(ctx context.Context, in *WatchStepRequest, opts ...grpc.CallOption) (PipelineService_WatchStepsClient, error) { - stream, err := c.cc.NewStream(ctx, &_PipelineService_serviceDesc.Streams[2], "/pipeline.PipelineService/WatchSteps", opts...) - if err != nil { - return nil, err - } - x := &pipelineServiceWatchStepsClient{stream} - if err := x.ClientStream.SendMsg(in); err != nil { - return nil, err - } - if err := x.ClientStream.CloseSend(); err != nil { - return nil, err - } - return x, nil -} - -type PipelineService_WatchStepsClient interface { - Recv() (*StepWatchEvent, error) - grpc.ClientStream -} - -type pipelineServiceWatchStepsClient struct { - grpc.ClientStream -} - -func (x *pipelineServiceWatchStepsClient) Recv() (*StepWatchEvent, error) { - m := new(StepWatchEvent) - if err := x.ClientStream.RecvMsg(m); err != nil { - return nil, err - } - return m, nil -} - -// PipelineServiceServer is the server API for PipelineService service. 
-type PipelineServiceServer interface { - ListPipelines(context.Context, *ListPipelinesRequest) (*v1alpha1.PipelineList, error) - WatchPipelines(*ListPipelinesRequest, PipelineService_WatchPipelinesServer) error - GetPipeline(context.Context, *GetPipelineRequest) (*v1alpha1.Pipeline, error) - RestartPipeline(context.Context, *RestartPipelineRequest) (*RestartPipelineResponse, error) - DeletePipeline(context.Context, *DeletePipelineRequest) (*DeletePipelineResponse, error) - PipelineLogs(*PipelineLogsRequest, PipelineService_PipelineLogsServer) error - WatchSteps(*WatchStepRequest, PipelineService_WatchStepsServer) error -} - -// UnimplementedPipelineServiceServer can be embedded to have forward compatible implementations. -type UnimplementedPipelineServiceServer struct { -} - -func (*UnimplementedPipelineServiceServer) ListPipelines(ctx context.Context, req *ListPipelinesRequest) (*v1alpha1.PipelineList, error) { - return nil, status.Errorf(codes.Unimplemented, "method ListPipelines not implemented") -} -func (*UnimplementedPipelineServiceServer) WatchPipelines(req *ListPipelinesRequest, srv PipelineService_WatchPipelinesServer) error { - return status.Errorf(codes.Unimplemented, "method WatchPipelines not implemented") -} -func (*UnimplementedPipelineServiceServer) GetPipeline(ctx context.Context, req *GetPipelineRequest) (*v1alpha1.Pipeline, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetPipeline not implemented") -} -func (*UnimplementedPipelineServiceServer) RestartPipeline(ctx context.Context, req *RestartPipelineRequest) (*RestartPipelineResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method RestartPipeline not implemented") -} -func (*UnimplementedPipelineServiceServer) DeletePipeline(ctx context.Context, req *DeletePipelineRequest) (*DeletePipelineResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method DeletePipeline not implemented") -} -func (*UnimplementedPipelineServiceServer) 
PipelineLogs(req *PipelineLogsRequest, srv PipelineService_PipelineLogsServer) error { - return status.Errorf(codes.Unimplemented, "method PipelineLogs not implemented") -} -func (*UnimplementedPipelineServiceServer) WatchSteps(req *WatchStepRequest, srv PipelineService_WatchStepsServer) error { - return status.Errorf(codes.Unimplemented, "method WatchSteps not implemented") -} - -func RegisterPipelineServiceServer(s *grpc.Server, srv PipelineServiceServer) { - s.RegisterService(&_PipelineService_serviceDesc, srv) -} - -func _PipelineService_ListPipelines_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListPipelinesRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).ListPipelines(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/pipeline.PipelineService/ListPipelines", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).ListPipelines(ctx, req.(*ListPipelinesRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_WatchPipelines_Handler(srv interface{}, stream grpc.ServerStream) error { - m := new(ListPipelinesRequest) - if err := stream.RecvMsg(m); err != nil { - return err - } - return srv.(PipelineServiceServer).WatchPipelines(m, &pipelineServiceWatchPipelinesServer{stream}) -} - -type PipelineService_WatchPipelinesServer interface { - Send(*PipelineWatchEvent) error - grpc.ServerStream -} - -type pipelineServiceWatchPipelinesServer struct { - grpc.ServerStream -} - -func (x *pipelineServiceWatchPipelinesServer) Send(m *PipelineWatchEvent) error { - return x.ServerStream.SendMsg(m) -} - -func _PipelineService_GetPipeline_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := 
new(GetPipelineRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).GetPipeline(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/pipeline.PipelineService/GetPipeline", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).GetPipeline(ctx, req.(*GetPipelineRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_RestartPipeline_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(RestartPipelineRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).RestartPipeline(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/pipeline.PipelineService/RestartPipeline", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).RestartPipeline(ctx, req.(*RestartPipelineRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_DeletePipeline_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DeletePipelineRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).DeletePipeline(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/pipeline.PipelineService/DeletePipeline", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).DeletePipeline(ctx, req.(*DeletePipelineRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_PipelineLogs_Handler(srv interface{}, stream grpc.ServerStream) error { - m := new(PipelineLogsRequest) 
- if err := stream.RecvMsg(m); err != nil { - return err - } - return srv.(PipelineServiceServer).PipelineLogs(m, &pipelineServicePipelineLogsServer{stream}) -} - -type PipelineService_PipelineLogsServer interface { - Send(*LogEntry) error - grpc.ServerStream -} - -type pipelineServicePipelineLogsServer struct { - grpc.ServerStream -} - -func (x *pipelineServicePipelineLogsServer) Send(m *LogEntry) error { - return x.ServerStream.SendMsg(m) -} - -func _PipelineService_WatchSteps_Handler(srv interface{}, stream grpc.ServerStream) error { - m := new(WatchStepRequest) - if err := stream.RecvMsg(m); err != nil { - return err - } - return srv.(PipelineServiceServer).WatchSteps(m, &pipelineServiceWatchStepsServer{stream}) -} - -type PipelineService_WatchStepsServer interface { - Send(*StepWatchEvent) error - grpc.ServerStream -} - -type pipelineServiceWatchStepsServer struct { - grpc.ServerStream -} - -func (x *pipelineServiceWatchStepsServer) Send(m *StepWatchEvent) error { - return x.ServerStream.SendMsg(m) -} - -var _PipelineService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "pipeline.PipelineService", - HandlerType: (*PipelineServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "ListPipelines", - Handler: _PipelineService_ListPipelines_Handler, - }, - { - MethodName: "GetPipeline", - Handler: _PipelineService_GetPipeline_Handler, - }, - { - MethodName: "RestartPipeline", - Handler: _PipelineService_RestartPipeline_Handler, - }, - { - MethodName: "DeletePipeline", - Handler: _PipelineService_DeletePipeline_Handler, - }, - }, - Streams: []grpc.StreamDesc{ - { - StreamName: "WatchPipelines", - Handler: _PipelineService_WatchPipelines_Handler, - ServerStreams: true, - }, - { - StreamName: "PipelineLogs", - Handler: _PipelineService_PipelineLogs_Handler, - ServerStreams: true, - }, - { - StreamName: "WatchSteps", - Handler: _PipelineService_WatchSteps_Handler, - ServerStreams: true, - }, - }, - Metadata: "pkg/apiclient/pipeline/pipeline.proto", -} - 
-func (m *ListPipelinesRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *ListPipelinesRequest) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *ListPipelinesRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } - if m.ListOptions != nil { - { - size, err := m.ListOptions.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintPipeline(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x12 - } - if len(m.Namespace) > 0 { - i -= len(m.Namespace) - copy(dAtA[i:], m.Namespace) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.Namespace))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *PipelineWatchEvent) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *PipelineWatchEvent) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *PipelineWatchEvent) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } - if m.Object != nil { - { - size, err := m.Object.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintPipeline(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x12 - } - if len(m.Type) > 0 { - i -= len(m.Type) - copy(dAtA[i:], m.Type) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.Type))) - i-- - dAtA[i] = 0xa - } - return 
len(dAtA) - i, nil -} - -func (m *GetPipelineRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetPipelineRequest) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *GetPipelineRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } - if m.GetOptions != nil { - { - size, err := m.GetOptions.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintPipeline(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x1a - } - if len(m.Namespace) > 0 { - i -= len(m.Namespace) - copy(dAtA[i:], m.Namespace) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.Namespace))) - i-- - dAtA[i] = 0x12 - } - if len(m.Name) > 0 { - i -= len(m.Name) - copy(dAtA[i:], m.Name) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.Name))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *RestartPipelineRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *RestartPipelineRequest) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *RestartPipelineRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } - if len(m.Namespace) > 0 { - i -= len(m.Namespace) - copy(dAtA[i:], m.Namespace) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.Namespace))) - i-- - dAtA[i] = 0x12 - } - if len(m.Name) > 0 { - 
i -= len(m.Name) - copy(dAtA[i:], m.Name) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.Name))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *RestartPipelineResponse) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *RestartPipelineResponse) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *RestartPipelineResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } - return len(dAtA) - i, nil -} - -func (m *DeletePipelineRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *DeletePipelineRequest) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *DeletePipelineRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } - if m.DeleteOptions != nil { - { - size, err := m.DeleteOptions.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintPipeline(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x1a - } - if len(m.Namespace) > 0 { - i -= len(m.Namespace) - copy(dAtA[i:], m.Namespace) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.Namespace))) - i-- - dAtA[i] = 0x12 - } - if len(m.Name) > 0 { - i -= len(m.Name) - copy(dAtA[i:], m.Name) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.Name))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m 
*DeletePipelineResponse) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *DeletePipelineResponse) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *DeletePipelineResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } - return len(dAtA) - i, nil -} - -func (m *WatchStepRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *WatchStepRequest) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *WatchStepRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } - if m.ListOptions != nil { - { - size, err := m.ListOptions.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintPipeline(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x12 - } - if len(m.Namespace) > 0 { - i -= len(m.Namespace) - copy(dAtA[i:], m.Namespace) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.Namespace))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *StepWatchEvent) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *StepWatchEvent) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return 
m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *StepWatchEvent) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } - if m.Object != nil { - { - size, err := m.Object.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintPipeline(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x12 - } - if len(m.Type) > 0 { - i -= len(m.Type) - copy(dAtA[i:], m.Type) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.Type))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *PipelineLogsRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *PipelineLogsRequest) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *PipelineLogsRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } - if m.PodLogOptions != nil { - { - size, err := m.PodLogOptions.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintPipeline(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x2a - } - if len(m.Grep) > 0 { - i -= len(m.Grep) - copy(dAtA[i:], m.Grep) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.Grep))) - i-- - dAtA[i] = 0x22 - } - if len(m.StepName) > 0 { - i -= len(m.StepName) - copy(dAtA[i:], m.StepName) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.StepName))) - i-- - dAtA[i] = 0x1a - } - if len(m.Name) > 0 { - i -= len(m.Name) - copy(dAtA[i:], m.Name) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.Name))) - i-- - dAtA[i] = 0x12 - } - if len(m.Namespace) > 0 { - i -= 
len(m.Namespace) - copy(dAtA[i:], m.Namespace) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.Namespace))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *LogEntry) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *LogEntry) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *LogEntry) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } - if len(m.Msg) > 0 { - i -= len(m.Msg) - copy(dAtA[i:], m.Msg) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.Msg))) - i-- - dAtA[i] = 0x3a - } - if m.Time != nil { - { - size, err := m.Time.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintPipeline(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x32 - } - if len(m.StepName) > 0 { - i -= len(m.StepName) - copy(dAtA[i:], m.StepName) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.StepName))) - i-- - dAtA[i] = 0x1a - } - if len(m.PipelineName) > 0 { - i -= len(m.PipelineName) - copy(dAtA[i:], m.PipelineName) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.PipelineName))) - i-- - dAtA[i] = 0x12 - } - if len(m.Namespace) > 0 { - i -= len(m.Namespace) - copy(dAtA[i:], m.Namespace) - i = encodeVarintPipeline(dAtA, i, uint64(len(m.Namespace))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func encodeVarintPipeline(dAtA []byte, offset int, v uint64) int { - offset -= sovPipeline(v) - base := offset - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ - } - dAtA[offset] = uint8(v) - return base -} -func (m *ListPipelinesRequest) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = 
len(m.Namespace) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - if m.ListOptions != nil { - l = m.ListOptions.Size() - n += 1 + l + sovPipeline(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *PipelineWatchEvent) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Type) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - if m.Object != nil { - l = m.Object.Size() - n += 1 + l + sovPipeline(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *GetPipelineRequest) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Name) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - l = len(m.Namespace) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - if m.GetOptions != nil { - l = m.GetOptions.Size() - n += 1 + l + sovPipeline(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *RestartPipelineRequest) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Name) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - l = len(m.Namespace) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *RestartPipelineResponse) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *DeletePipelineRequest) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Name) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - l = len(m.Namespace) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - if m.DeleteOptions != nil { - l = m.DeleteOptions.Size() - n += 1 + l + sovPipeline(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m 
*DeletePipelineResponse) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *WatchStepRequest) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Namespace) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - if m.ListOptions != nil { - l = m.ListOptions.Size() - n += 1 + l + sovPipeline(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *StepWatchEvent) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Type) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - if m.Object != nil { - l = m.Object.Size() - n += 1 + l + sovPipeline(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *PipelineLogsRequest) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Namespace) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - l = len(m.Name) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - l = len(m.StepName) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - l = len(m.Grep) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - if m.PodLogOptions != nil { - l = m.PodLogOptions.Size() - n += 1 + l + sovPipeline(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *LogEntry) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Namespace) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - l = len(m.PipelineName) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - l = len(m.StepName) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - if m.Time != nil { - l = m.Time.Size() - n += 1 + l + sovPipeline(uint64(l)) - } - l = len(m.Msg) - if l > 0 { - n += 1 + l + sovPipeline(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - 
} - return n -} - -func sovPipeline(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 -} -func sozPipeline(x uint64) (n int) { - return sovPipeline(uint64((x << 1) ^ uint64((int64(x) >> 63)))) -} -func (m *ListPipelinesRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: ListPipelinesRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: ListPipelinesRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Namespace", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Namespace = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ListOptions", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - 
return ErrInvalidLengthPipeline - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.ListOptions == nil { - m.ListOptions = &v1.ListOptions{} - } - if err := m.ListOptions.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipPipeline(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthPipeline - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *PipelineWatchEvent) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: PipelineWatchEvent: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: PipelineWatchEvent: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Type", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen - if 
postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Type = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Object", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.Object == nil { - m.Object = &v1alpha1.Pipeline{} - } - if err := m.Object.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipPipeline(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthPipeline - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetPipelineRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetPipelineRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetPipelineRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Name = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Namespace", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen - if 
postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Namespace = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field GetOptions", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.GetOptions == nil { - m.GetOptions = &v1.GetOptions{} - } - if err := m.GetOptions.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipPipeline(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthPipeline - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *RestartPipelineRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: RestartPipelineRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: RestartPipelineRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Name = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Namespace", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen 
- if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Namespace = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipPipeline(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthPipeline - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *RestartPipelineResponse) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: RestartPipelineResponse: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: RestartPipelineResponse: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := skipPipeline(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthPipeline - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *DeletePipelineRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: DeletePipelineRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: DeletePipelineRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Name = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Namespace", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen - 
if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Namespace = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field DeleteOptions", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.DeleteOptions == nil { - m.DeleteOptions = &v1.DeleteOptions{} - } - if err := m.DeleteOptions.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipPipeline(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthPipeline - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *DeletePipelineResponse) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: DeletePipelineResponse: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: DeletePipelineResponse: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := skipPipeline(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthPipeline - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *WatchStepRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: WatchStepRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: WatchStepRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Namespace", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Namespace = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ListOptions", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - 
if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.ListOptions == nil { - m.ListOptions = &v1.ListOptions{} - } - if err := m.ListOptions.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipPipeline(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthPipeline - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *StepWatchEvent) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: StepWatchEvent: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: StepWatchEvent: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Type", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Type = 
string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Object", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.Object == nil { - m.Object = &v1alpha1.Step{} - } - if err := m.Object.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipPipeline(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthPipeline - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *PipelineLogsRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: PipelineLogsRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: PipelineLogsRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Namespace", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Namespace = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen - 
if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Name = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field StepName", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.StepName = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 4: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Grep", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Grep = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 5: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field PodLogOptions", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + 
msglen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.PodLogOptions == nil { - m.PodLogOptions = &v11.PodLogOptions{} - } - if err := m.PodLogOptions.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipPipeline(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthPipeline - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *LogEntry) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: LogEntry: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: LogEntry: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Namespace", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return 
io.ErrUnexpectedEOF - } - m.Namespace = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field PipelineName", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.PipelineName = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field StepName", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.StepName = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 6: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Time", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if 
postIndex > l { - return io.ErrUnexpectedEOF - } - if m.Time == nil { - m.Time = &v1.Time{} - } - if err := m.Time.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - case 7: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Msg", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowPipeline - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthPipeline - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthPipeline - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Msg = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipPipeline(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthPipeline - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func skipPipeline(dAtA []byte) (n int, err error) { - l := len(dAtA) - iNdEx := 0 - depth := 0 - for iNdEx < l { - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowPipeline - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - wireType := int(wire & 0x7) - switch wireType { - case 0: - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowPipeline - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - iNdEx++ - if dAtA[iNdEx-1] < 0x80 { - break - } - } - case 1: - iNdEx += 8 - case 2: - var length int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowPipeline - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - length |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - if length < 0 { - return 0, ErrInvalidLengthPipeline - } - iNdEx += length - case 3: - depth++ - case 4: - if depth == 0 { - return 0, ErrUnexpectedEndOfGroupPipeline - } - depth-- - case 5: - iNdEx += 4 - default: - return 0, fmt.Errorf("proto: illegal wireType %d", wireType) - } - if iNdEx < 0 { - return 0, ErrInvalidLengthPipeline - } - if depth == 0 { - return iNdEx, nil - } - } - return 0, io.ErrUnexpectedEOF -} - -var ( - ErrInvalidLengthPipeline = fmt.Errorf("proto: negative length found during unmarshaling") - ErrIntOverflowPipeline = fmt.Errorf("proto: integer overflow") - ErrUnexpectedEndOfGroupPipeline = fmt.Errorf("proto: unexpected end of group") -) diff --git a/pkg/apiclient/pipeline/pipeline.pb.gw.go b/pkg/apiclient/pipeline/pipeline.pb.gw.go deleted file mode 100644 index d759ffa6b373..000000000000 --- a/pkg/apiclient/pipeline/pipeline.pb.gw.go +++ /dev/null @@ -1,843 +0,0 @@ -// Code generated by protoc-gen-grpc-gateway. 
DO NOT EDIT. -// source: pkg/apiclient/pipeline/pipeline.proto - -/* -Package pipeline is a reverse proxy. - -It translates gRPC into RESTful JSON APIs. -*/ -package pipeline - -import ( - "context" - "io" - "net/http" - - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/grpclog" - "google.golang.org/grpc/metadata" - "google.golang.org/grpc/status" -) - -// Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join - -var ( - filter_PipelineService_ListPipelines_0 = &utilities.DoubleArray{Encoding: map[string]int{"namespace": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} -) - -func request_PipelineService_ListPipelines_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListPipelinesRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["namespace"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace") - } - - protoReq.Namespace, err = runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) - } - - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_ListPipelines_0); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := 
client.ListPipelines(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -func local_request_PipelineService_ListPipelines_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListPipelinesRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["namespace"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace") - } - - protoReq.Namespace, err = runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) - } - - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_ListPipelines_0); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := server.ListPipelines(ctx, &protoReq) - return msg, metadata, err - -} - -var ( - filter_PipelineService_WatchPipelines_0 = &utilities.DoubleArray{Encoding: map[string]int{"namespace": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} -) - -func request_PipelineService_WatchPipelines_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (PipelineService_WatchPipelinesClient, runtime.ServerMetadata, error) { - var protoReq ListPipelinesRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["namespace"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace") - } - - protoReq.Namespace, err = 
runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) - } - - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_WatchPipelines_0); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - stream, err := client.WatchPipelines(ctx, &protoReq) - if err != nil { - return nil, metadata, err - } - header, err := stream.Header() - if err != nil { - return nil, metadata, err - } - metadata.HeaderMD = header - return stream, metadata, nil - -} - -var ( - filter_PipelineService_GetPipeline_0 = &utilities.DoubleArray{Encoding: map[string]int{"namespace": 0, "name": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}} -) - -func request_PipelineService_GetPipeline_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetPipelineRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["namespace"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace") - } - - protoReq.Namespace, err = runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) - } - - val, ok = pathParams["name"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") - } - - protoReq.Name, err = runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) - } - - if err := req.ParseForm(); err != nil { - 
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_GetPipeline_0); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.GetPipeline(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -func local_request_PipelineService_GetPipeline_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetPipelineRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["namespace"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace") - } - - protoReq.Namespace, err = runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) - } - - val, ok = pathParams["name"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") - } - - protoReq.Name, err = runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) - } - - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_GetPipeline_0); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := server.GetPipeline(ctx, &protoReq) - return msg, metadata, err - -} - -func request_PipelineService_RestartPipeline_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, 
pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq RestartPipelineRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["namespace"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace") - } - - protoReq.Namespace, err = runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) - } - - val, ok = pathParams["name"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") - } - - protoReq.Name, err = runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) - } - - msg, err := client.RestartPipeline(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -func local_request_PipelineService_RestartPipeline_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq RestartPipelineRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["namespace"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace") - } - - protoReq.Namespace, err = runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) - } - - val, ok = pathParams["name"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") - } - - protoReq.Name, err = runtime.String(val) - - if err != nil { - return nil, metadata, 
status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) - } - - msg, err := server.RestartPipeline(ctx, &protoReq) - return msg, metadata, err - -} - -var ( - filter_PipelineService_DeletePipeline_0 = &utilities.DoubleArray{Encoding: map[string]int{"namespace": 0, "name": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}} -) - -func request_PipelineService_DeletePipeline_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeletePipelineRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["namespace"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace") - } - - protoReq.Namespace, err = runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) - } - - val, ok = pathParams["name"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") - } - - protoReq.Name, err = runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) - } - - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_DeletePipeline_0); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.DeletePipeline(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -func local_request_PipelineService_DeletePipeline_0(ctx context.Context, marshaler runtime.Marshaler, server 
PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeletePipelineRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["namespace"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace") - } - - protoReq.Namespace, err = runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) - } - - val, ok = pathParams["name"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") - } - - protoReq.Name, err = runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) - } - - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_DeletePipeline_0); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := server.DeletePipeline(ctx, &protoReq) - return msg, metadata, err - -} - -var ( - filter_PipelineService_PipelineLogs_0 = &utilities.DoubleArray{Encoding: map[string]int{"namespace": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} -) - -func request_PipelineService_PipelineLogs_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (PipelineService_PipelineLogsClient, runtime.ServerMetadata, error) { - var protoReq PipelineLogsRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["namespace"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing 
parameter %s", "namespace") - } - - protoReq.Namespace, err = runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) - } - - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_PipelineLogs_0); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - stream, err := client.PipelineLogs(ctx, &protoReq) - if err != nil { - return nil, metadata, err - } - header, err := stream.Header() - if err != nil { - return nil, metadata, err - } - metadata.HeaderMD = header - return stream, metadata, nil - -} - -var ( - filter_PipelineService_WatchSteps_0 = &utilities.DoubleArray{Encoding: map[string]int{"namespace": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} -) - -func request_PipelineService_WatchSteps_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (PipelineService_WatchStepsClient, runtime.ServerMetadata, error) { - var protoReq WatchStepRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["namespace"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace") - } - - protoReq.Namespace, err = runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) - } - - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_WatchSteps_0); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - stream, 
err := client.WatchSteps(ctx, &protoReq) - if err != nil { - return nil, metadata, err - } - header, err := stream.Header() - if err != nil { - return nil, metadata, err - } - metadata.HeaderMD = header - return stream, metadata, nil - -} - -// RegisterPipelineServiceHandlerServer registers the http handlers for service PipelineService to "mux". -// UnaryRPC :call PipelineServiceServer directly. -// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. -// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterPipelineServiceHandlerFromEndpoint instead. -func RegisterPipelineServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server PipelineServiceServer) error { - - mux.Handle("GET", pattern_PipelineService_ListPipelines_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - var stream runtime.ServerTransportStream - ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := local_request_PipelineService_ListPipelines_0(rctx, inboundMarshaler, server, req, pathParams) - md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_PipelineService_ListPipelines_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- - }) - - mux.Handle("GET", pattern_PipelineService_WatchPipelines_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - err := status.Error(codes.Unimplemented, "streaming calls are not yet supported in the in-process transport") - _, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - }) - - mux.Handle("GET", pattern_PipelineService_GetPipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - var stream runtime.ServerTransportStream - ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := local_request_PipelineService_GetPipeline_0(rctx, inboundMarshaler, server, req, pathParams) - md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_PipelineService_GetPipeline_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- - }) - - mux.Handle("POST", pattern_PipelineService_RestartPipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - var stream runtime.ServerTransportStream - ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := local_request_PipelineService_RestartPipeline_0(rctx, inboundMarshaler, server, req, pathParams) - md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_PipelineService_RestartPipeline_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- - }) - - mux.Handle("DELETE", pattern_PipelineService_DeletePipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - var stream runtime.ServerTransportStream - ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := local_request_PipelineService_DeletePipeline_0(rctx, inboundMarshaler, server, req, pathParams) - md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_PipelineService_DeletePipeline_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- - }) - - mux.Handle("GET", pattern_PipelineService_PipelineLogs_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - err := status.Error(codes.Unimplemented, "streaming calls are not yet supported in the in-process transport") - _, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - }) - - mux.Handle("GET", pattern_PipelineService_WatchSteps_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - err := status.Error(codes.Unimplemented, "streaming calls are not yet supported in the in-process transport") - _, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - }) - - return nil -} - -// RegisterPipelineServiceHandlerFromEndpoint is same as RegisterPipelineServiceHandler but -// automatically dials to "endpoint" and closes the connection when "ctx" gets done. -func RegisterPipelineServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) - if err != nil { - return err - } - defer func() { - if err != nil { - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - return - } - go func() { - <-ctx.Done() - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - }() - }() - - return RegisterPipelineServiceHandler(ctx, mux, conn) -} - -// RegisterPipelineServiceHandler registers the http handlers for service PipelineService to "mux". -// The handlers forward requests to the grpc endpoint over "conn". 
-func RegisterPipelineServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - return RegisterPipelineServiceHandlerClient(ctx, mux, NewPipelineServiceClient(conn)) -} - -// RegisterPipelineServiceHandlerClient registers the http handlers for service PipelineService -// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "PipelineServiceClient". -// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "PipelineServiceClient" -// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "PipelineServiceClient" to call the correct interceptors. -func RegisterPipelineServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client PipelineServiceClient) error { - - mux.Handle("GET", pattern_PipelineService_ListPipelines_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_PipelineService_ListPipelines_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_PipelineService_ListPipelines_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- - }) - - mux.Handle("GET", pattern_PipelineService_WatchPipelines_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_PipelineService_WatchPipelines_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_PipelineService_WatchPipelines_0(ctx, mux, outboundMarshaler, w, req, func() (proto.Message, error) { return resp.Recv() }, mux.GetForwardResponseOptions()...) - - }) - - mux.Handle("GET", pattern_PipelineService_GetPipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_PipelineService_GetPipeline_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_PipelineService_GetPipeline_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- - }) - - mux.Handle("POST", pattern_PipelineService_RestartPipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_PipelineService_RestartPipeline_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_PipelineService_RestartPipeline_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - mux.Handle("DELETE", pattern_PipelineService_DeletePipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_PipelineService_DeletePipeline_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_PipelineService_DeletePipeline_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- - }) - - mux.Handle("GET", pattern_PipelineService_PipelineLogs_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_PipelineService_PipelineLogs_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_PipelineService_PipelineLogs_0(ctx, mux, outboundMarshaler, w, req, func() (proto.Message, error) { return resp.Recv() }, mux.GetForwardResponseOptions()...) - - }) - - mux.Handle("GET", pattern_PipelineService_WatchSteps_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_PipelineService_WatchSteps_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_PipelineService_WatchSteps_0(ctx, mux, outboundMarshaler, w, req, func() (proto.Message, error) { return resp.Recv() }, mux.GetForwardResponseOptions()...) 
- - }) - - return nil -} - -var ( - pattern_PipelineService_ListPipelines_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"api", "v1", "pipelines", "namespace"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_WatchPipelines_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "stream", "pipelines", "namespace"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_GetPipeline_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "pipelines", "namespace", "name"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_RestartPipeline_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"api", "v1", "pipelines", "namespace", "name", "restart"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_DeletePipeline_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "pipelines", "namespace", "name"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_PipelineLogs_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"api", "v1", "stream", "pipelines", "namespace", "logs"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_WatchSteps_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "stream", "steps", "namespace"}, "", runtime.AssumeColonVerbOpt(true))) -) - -var ( - forward_PipelineService_ListPipelines_0 = runtime.ForwardResponseMessage - - forward_PipelineService_WatchPipelines_0 = runtime.ForwardResponseStream - - forward_PipelineService_GetPipeline_0 = runtime.ForwardResponseMessage - - 
forward_PipelineService_RestartPipeline_0 = runtime.ForwardResponseMessage - - forward_PipelineService_DeletePipeline_0 = runtime.ForwardResponseMessage - - forward_PipelineService_PipelineLogs_0 = runtime.ForwardResponseStream - - forward_PipelineService_WatchSteps_0 = runtime.ForwardResponseStream -) diff --git a/pkg/apiclient/pipeline/pipeline.proto b/pkg/apiclient/pipeline/pipeline.proto deleted file mode 100644 index 3ec84ef13482..000000000000 --- a/pkg/apiclient/pipeline/pipeline.proto +++ /dev/null @@ -1,97 +0,0 @@ -syntax = "proto3"; -option go_package = "github.com/argoproj/argo-workflows/pkg/apiclient/pipeline"; - -import "google/api/annotations.proto"; -import "k8s.io/apimachinery/pkg/apis/meta/v1/generated.proto"; -import "k8s.io/api/core/v1/generated.proto"; -import "github.com/argoproj-labs/argo-dataflow/api/v1alpha1/generated.proto"; - -package pipeline; - -message ListPipelinesRequest { - string namespace = 1; - k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; -} - -message PipelineWatchEvent { - string type = 1; - github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Pipeline object = 2; -} - -message GetPipelineRequest { - string name = 1; - string namespace = 2; - k8s.io.apimachinery.pkg.apis.meta.v1.GetOptions getOptions = 3; -} - -message RestartPipelineRequest { - string name = 1; - string namespace = 2; -} - -message RestartPipelineResponse { -} - -message DeletePipelineRequest { - string name = 1; - string namespace = 2; - k8s.io.apimachinery.pkg.apis.meta.v1.DeleteOptions deleteOptions = 3; -} - -message DeletePipelineResponse { -} - -message WatchStepRequest { - string namespace = 1; - k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; -} - -message StepWatchEvent { - string type = 1; - github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Step object = 2; -} - -message PipelineLogsRequest { - string namespace = 1; - // optional - only return entries for this pipeline - string name = 2; - // optional - only 
return entries for this step - string stepName = 3; - // optional - only return entries which match this expresssion - string grep = 4; - k8s.io.api.core.v1.PodLogOptions podLogOptions = 5; -} - -// structured log entry -message LogEntry { - string namespace = 1; - string pipelineName = 2; - string stepName = 3; - k8s.io.apimachinery.pkg.apis.meta.v1.Time time = 6; - string msg = 7; -} - - -service PipelineService { - rpc ListPipelines (ListPipelinesRequest) returns (github.com.argoproj_labs.argo_dataflow.api.v1alpha1.PipelineList) { - option (google.api.http).get = "/api/v1/pipelines/{namespace}"; - } - rpc WatchPipelines (ListPipelinesRequest) returns (stream PipelineWatchEvent) { - option (google.api.http).get = "/api/v1/stream/pipelines/{namespace}"; - } - rpc GetPipeline (GetPipelineRequest) returns (github.com.argoproj_labs.argo_dataflow.api.v1alpha1.Pipeline) { - option (google.api.http).get = "/api/v1/pipelines/{namespace}/{name}"; - } - rpc RestartPipeline (RestartPipelineRequest) returns (RestartPipelineResponse) { - option (google.api.http).post = "/api/v1/pipelines/{namespace}/{name}/restart"; - } - rpc DeletePipeline (DeletePipelineRequest) returns (DeletePipelineResponse) { - option (google.api.http).delete = "/api/v1/pipelines/{namespace}/{name}"; - } - rpc PipelineLogs (PipelineLogsRequest) returns (stream LogEntry) { - option (google.api.http).get = "/api/v1/stream/pipelines/{namespace}/logs"; - } - rpc WatchSteps (WatchStepRequest) returns (stream StepWatchEvent) { - option (google.api.http).get = "/api/v1/stream/steps/{namespace}"; - } -} diff --git a/pkg/apiclient/sensor/sensor.proto b/pkg/apiclient/sensor/sensor.proto index c1d6d9bc9b09..07b9a595ad93 100644 --- a/pkg/apiclient/sensor/sensor.proto +++ b/pkg/apiclient/sensor/sensor.proto @@ -9,94 +9,94 @@ import "github.com/argoproj/argo-events/pkg/apis/sensor/v1alpha1/generated.proto package sensor; message ListSensorsRequest { - string namespace = 1; - 
k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; + string namespace = 1; + k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; } message CreateSensorRequest { - string namespace = 1; - github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor sensor = 2; - k8s.io.apimachinery.pkg.apis.meta.v1.CreateOptions createOptions = 3; + string namespace = 1; + github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor sensor = 2; + k8s.io.apimachinery.pkg.apis.meta.v1.CreateOptions createOptions = 3; } message GetSensorRequest { - string name = 1; - string namespace = 2; - k8s.io.apimachinery.pkg.apis.meta.v1.GetOptions getOptions = 3; + string name = 1; + string namespace = 2; + k8s.io.apimachinery.pkg.apis.meta.v1.GetOptions getOptions = 3; } message UpdateSensorRequest { - string namespace = 1; - string name = 2; - github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor sensor = 3; + string namespace = 1; + string name = 2; + github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor sensor = 3; } message DeleteSensorRequest { - string name = 1; - string namespace = 2; - k8s.io.apimachinery.pkg.apis.meta.v1.DeleteOptions deleteOptions = 3; + string name = 1; + string namespace = 2; + k8s.io.apimachinery.pkg.apis.meta.v1.DeleteOptions deleteOptions = 3; } message DeleteSensorResponse { } message SensorsLogsRequest { - string namespace = 1; - // optional - only return entries for this sensor name - string name = 2; - // optional - only return entries for this trigger - string triggerName = 3; - // option - only return entries where `msg` contains this regular expressions - string grep = 4; - k8s.io.api.core.v1.PodLogOptions podLogOptions = 5; + string namespace = 1; + // optional - only return entries for this sensor name + string name = 2; + // optional - only return entries for this trigger + string triggerName = 3; + // option - only return entries where `msg` contains this regular expressions + string grep = 4; + 
k8s.io.api.core.v1.PodLogOptions podLogOptions = 5; } // structured log entry message LogEntry { - string namespace = 1; - string sensorName = 2; - // optional - any trigger name - string triggerName = 3; - string level = 5; - k8s.io.apimachinery.pkg.apis.meta.v1.Time time = 6; - string msg = 7; - // optional - trigger dependency name - string dependencyName = 8; - // optional - Cloud Event context - string eventContext = 9; + string namespace = 1; + string sensorName = 2; + // optional - any trigger name + string triggerName = 3; + string level = 5; + k8s.io.apimachinery.pkg.apis.meta.v1.Time time = 6; + string msg = 7; + // optional - trigger dependency name + string dependencyName = 8; + // optional - Cloud Event context + string eventContext = 9; } message SensorWatchEvent { - string type = 1; - github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor object = 2; + string type = 1; + github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor object = 2; } service SensorService { - rpc ListSensors (ListSensorsRequest) returns (github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.SensorList) { - option (google.api.http).get = "/api/v1/sensors/{namespace}"; - } - rpc SensorsLogs (SensorsLogsRequest) returns (stream LogEntry) { - option (google.api.http).get = "/api/v1/stream/sensors/{namespace}/logs"; - } - rpc WatchSensors (ListSensorsRequest) returns (stream SensorWatchEvent) { - option (google.api.http).get = "/api/v1/stream/sensors/{namespace}"; - } - rpc CreateSensor (CreateSensorRequest) returns (github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor) { - option (google.api.http) = { - post: "/api/v1/sensors/{namespace}" - body: "*" - }; - } - rpc UpdateSensor (UpdateSensorRequest) returns (github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor) { - option (google.api.http) = { - put: "/api/v1/sensors/{namespace}/{name}" - body: "*" - }; - } - rpc DeleteSensor (DeleteSensorRequest) returns (DeleteSensorResponse) { - option 
(google.api.http).delete = "/api/v1/sensors/{namespace}/{name}"; - } - rpc GetSensor (GetSensorRequest) returns (github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor) { - option (google.api.http).get = "/api/v1/sensors/{namespace}/{name}"; - } + rpc ListSensors(ListSensorsRequest) returns (github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.SensorList) { + option (google.api.http).get = "/api/v1/sensors/{namespace}"; + } + rpc SensorsLogs(SensorsLogsRequest) returns (stream LogEntry) { + option (google.api.http).get = "/api/v1/stream/sensors/{namespace}/logs"; + } + rpc WatchSensors(ListSensorsRequest) returns (stream SensorWatchEvent) { + option (google.api.http).get = "/api/v1/stream/sensors/{namespace}"; + } + rpc CreateSensor(CreateSensorRequest) returns (github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor) { + option (google.api.http) = { + post : "/api/v1/sensors/{namespace}" + body : "*" + }; + } + rpc UpdateSensor(UpdateSensorRequest) returns (github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor) { + option (google.api.http) = { + put : "/api/v1/sensors/{namespace}/{name}" + body : "*" + }; + } + rpc DeleteSensor(DeleteSensorRequest) returns (DeleteSensorResponse) { + option (google.api.http).delete = "/api/v1/sensors/{namespace}/{name}"; + } + rpc GetSensor(GetSensorRequest) returns (github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor) { + option (google.api.http).get = "/api/v1/sensors/{namespace}/{name}"; + } } diff --git a/pkg/apiclient/workflow/mocks/WorkflowServiceClient.go b/pkg/apiclient/workflow/mocks/WorkflowServiceClient.go index bed8da3ff20f..226a1c1a24ae 100644 --- a/pkg/apiclient/workflow/mocks/WorkflowServiceClient.go +++ b/pkg/apiclient/workflow/mocks/WorkflowServiceClient.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.9.4. DO NOT EDIT. +// Code generated by mockery v2.10.0. DO NOT EDIT. 
package mocks diff --git a/pkg/apiclient/workflow/workflow.pb.go b/pkg/apiclient/workflow/workflow.pb.go index 6253bc258375..0733f16480d2 100644 --- a/pkg/apiclient/workflow/workflow.pb.go +++ b/pkg/apiclient/workflow/workflow.pb.go @@ -255,6 +255,7 @@ type WorkflowResubmitRequest struct { Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"` Memoized bool `protobuf:"varint,3,opt,name=memoized,proto3" json:"memoized,omitempty"` + Parameters []string `protobuf:"bytes,5,rep,name=parameters,proto3" json:"parameters,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -314,11 +315,19 @@ func (m *WorkflowResubmitRequest) GetMemoized() bool { return false } +func (m *WorkflowResubmitRequest) GetParameters() []string { + if m != nil { + return m.Parameters + } + return nil +} + type WorkflowRetryRequest struct { Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"` RestartSuccessful bool `protobuf:"varint,3,opt,name=restartSuccessful,proto3" json:"restartSuccessful,omitempty"` NodeFieldSelector string `protobuf:"bytes,4,opt,name=nodeFieldSelector,proto3" json:"nodeFieldSelector,omitempty"` + Parameters []string `protobuf:"bytes,5,rep,name=parameters,proto3" json:"parameters,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -385,6 +394,13 @@ func (m *WorkflowRetryRequest) GetNodeFieldSelector() string { return "" } +func (m *WorkflowRetryRequest) GetParameters() []string { + if m != nil { + return m.Parameters + } + return nil +} + type WorkflowResumeRequest struct { Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" 
json:"namespace,omitempty"` @@ -807,6 +823,7 @@ type WorkflowDeleteRequest struct { Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"` DeleteOptions *v1.DeleteOptions `protobuf:"bytes,3,opt,name=deleteOptions,proto3" json:"deleteOptions,omitempty"` + Force bool `protobuf:"varint,4,opt,name=force,proto3" json:"force,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -866,6 +883,13 @@ func (m *WorkflowDeleteRequest) GetDeleteOptions() *v1.DeleteOptions { return nil } +func (m *WorkflowDeleteRequest) GetForce() bool { + if m != nil { + return m.Force + } + return false +} + type WorkflowDeleteResponse struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -1288,96 +1312,98 @@ func init() { } var fileDescriptor_1f6bb75f9e833cb6 = []byte{ - // 1416 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x98, 0x4f, 0x6f, 0xdc, 0xc4, - 0x1b, 0xc7, 0x35, 0x9b, 0x36, 0x4d, 0x26, 0x7f, 0xda, 0xce, 0xaf, 0xed, 0x6f, 0xb1, 0xda, 0x34, - 0x9d, 0x52, 0x48, 0xd3, 0xc6, 0xce, 0x26, 0x05, 0x5a, 0x24, 0x90, 0x68, 0x53, 0x22, 0xca, 0x52, - 0x2a, 0x2f, 0x12, 0x82, 0x0b, 0x72, 0xbc, 0x13, 0xc7, 0x8d, 0xed, 0x31, 0x33, 0xb3, 0x5b, 0x85, - 0x52, 0x24, 0xb8, 0xc0, 0x01, 0x89, 0x03, 0x47, 0x2e, 0x08, 0x81, 0xe0, 0x80, 0x00, 0x21, 0x21, - 0x21, 0x21, 0x21, 0x8e, 0x1c, 0x2b, 0xf5, 0xca, 0x01, 0x55, 0xbc, 0x01, 0xde, 0x01, 0x9a, 0xb1, - 0x3d, 0xb6, 0xb3, 0xdb, 0xc5, 0x24, 0x1b, 0xe8, 0x6d, 0xc6, 0xf6, 0xcc, 0xf3, 0x99, 0xef, 0x33, - 0xf3, 0x3c, 0xcf, 0x18, 0x9e, 0x89, 0x37, 0x3d, 0xcb, 0x89, 0x7d, 0x37, 0xf0, 0x49, 0x24, 0xac, - 0x5b, 0x94, 0x6d, 0xae, 0x07, 0xf4, 0x96, 0x6e, 0x98, 0x31, 0xa3, 0x82, 0xa2, 0xb1, 0xac, 0x6f, - 0x1c, 0xf7, 0x28, 0xf5, 0x02, 0x22, 0xc7, 0x58, 0x4e, 0x14, 0x51, 0xe1, 0x08, 0x9f, 0x46, 0x3c, - 
0xf9, 0xce, 0xb8, 0xb0, 0x79, 0x91, 0x9b, 0x3e, 0x95, 0x6f, 0x43, 0xc7, 0xdd, 0xf0, 0x23, 0xc2, - 0xb6, 0xac, 0xd4, 0x04, 0xb7, 0x42, 0x22, 0x1c, 0xab, 0xdb, 0xb0, 0x3c, 0x12, 0x11, 0xe6, 0x08, - 0xd2, 0x4e, 0x47, 0xbd, 0xe4, 0xf9, 0x62, 0xa3, 0xb3, 0x66, 0xba, 0x34, 0xb4, 0x1c, 0xe6, 0xd1, - 0x98, 0xd1, 0x9b, 0xaa, 0xb1, 0x90, 0x99, 0xe5, 0xf9, 0x24, 0x1a, 0xb1, 0xdb, 0x70, 0x82, 0x78, - 0xc3, 0xe9, 0x9d, 0x0e, 0xe7, 0x10, 0x96, 0x4b, 0x19, 0xe9, 0x63, 0x12, 0xff, 0x52, 0x83, 0x47, - 0x5f, 0x4d, 0x67, 0xba, 0xc2, 0x88, 0x23, 0x88, 0x4d, 0xde, 0xec, 0x10, 0x2e, 0xd0, 0x71, 0x38, - 0x1e, 0x39, 0x21, 0xe1, 0xb1, 0xe3, 0x92, 0x3a, 0x98, 0x05, 0x73, 0xe3, 0x76, 0xfe, 0x00, 0xad, - 0x43, 0x2d, 0x45, 0xbd, 0x36, 0x0b, 0xe6, 0x26, 0x96, 0xae, 0x99, 0x39, 0xbd, 0x99, 0xd1, 0xab, - 0xc6, 0x1b, 0x9a, 0xde, 0xec, 0x2e, 0x9b, 0xf1, 0xa6, 0x67, 0xca, 0x05, 0x98, 0x5a, 0xda, 0x6c, - 0x01, 0x66, 0x06, 0x62, 0xeb, 0xb9, 0x11, 0x86, 0xd0, 0x8f, 0xb8, 0x70, 0x22, 0x97, 0xbc, 0xb0, - 0x52, 0x1f, 0x91, 0x18, 0x97, 0x6b, 0x75, 0x60, 0x17, 0x9e, 0x22, 0x0c, 0x27, 0x39, 0x61, 0x5d, - 0xc2, 0x56, 0xd8, 0x96, 0xdd, 0x89, 0xea, 0xfb, 0x66, 0xc1, 0xdc, 0x98, 0x5d, 0x7a, 0x86, 0x5e, - 0x83, 0x53, 0xae, 0x5a, 0xde, 0xcb, 0xb1, 0xf2, 0x53, 0x7d, 0xbf, 0x82, 0x5e, 0x36, 0x13, 0x8d, - 0xcc, 0xa2, 0xa3, 0x72, 0x44, 0xe9, 0x28, 0xb3, 0xdb, 0x30, 0xaf, 0x14, 0x87, 0xda, 0xe5, 0x99, - 0xf0, 0x77, 0x00, 0xa2, 0x8c, 0x7c, 0x95, 0x88, 0x4c, 0x3f, 0x04, 0xf7, 0x49, 0xb9, 0x52, 0xe9, - 0x54, 0xbb, 0xac, 0x69, 0x6d, 0xbb, 0xa6, 0x37, 0x20, 0xf4, 0x88, 0xc8, 0x00, 0x47, 0x14, 0xe0, - 0x62, 0x35, 0xc0, 0x55, 0x3d, 0xce, 0x2e, 0xcc, 0x81, 0x8e, 0xc1, 0xd1, 0x75, 0x9f, 0x04, 0x6d, - 0xae, 0x34, 0x19, 0xb7, 0xd3, 0x1e, 0xfe, 0x14, 0xc0, 0xff, 0x65, 0xc8, 0x4d, 0x9f, 0x8b, 0x6a, - 0x3e, 0x6f, 0xc1, 0x89, 0xc0, 0xe7, 0x1a, 0x30, 0x71, 0x7b, 0xa3, 0x1a, 0x60, 0x33, 0x1f, 0x68, - 0x17, 0x67, 0x29, 0x20, 0x8e, 0x94, 0x10, 0x3d, 0xf8, 0x7f, 0xbd, 0x1d, 0x08, 0xef, 0xac, 0x85, - 0xfe, 0x2e, 0x94, 0x35, 0xe0, 0x58, 
0x48, 0x42, 0xea, 0xbf, 0x45, 0xda, 0xca, 0xcc, 0x98, 0xad, - 0xfb, 0xf8, 0x73, 0x00, 0x8f, 0xe4, 0x96, 0x04, 0xdb, 0xda, 0xb9, 0x99, 0xf3, 0xf0, 0x30, 0x23, - 0x5c, 0x38, 0x4c, 0xb4, 0x3a, 0xae, 0x4b, 0x38, 0x5f, 0xef, 0x04, 0xa9, 0xbd, 0xde, 0x17, 0xf2, - 0xeb, 0x88, 0xb6, 0xc9, 0xf3, 0x72, 0xbd, 0x2d, 0x12, 0x10, 0x57, 0x50, 0x96, 0xfa, 0xa9, 0xf7, - 0x05, 0xbe, 0x95, 0x9f, 0x53, 0xa9, 0x47, 0x48, 0x76, 0x85, 0xd9, 0x6b, 0x78, 0xe4, 0x41, 0x86, - 0x9b, 0xb0, 0x9e, 0x19, 0x7e, 0x85, 0xb0, 0xd0, 0x8f, 0x0a, 0x31, 0xe2, 0x1f, 0xdb, 0xc6, 0x1f, - 0x15, 0x76, 0x5e, 0x4b, 0xd0, 0xf8, 0x5f, 0x5a, 0x05, 0xaa, 0xc3, 0x03, 0x21, 0xe1, 0xdc, 0xf1, - 0x48, 0x2a, 0x71, 0xd6, 0xc5, 0x77, 0x0b, 0xc7, 0xb7, 0xb5, 0x9b, 0xe3, 0x3b, 0x24, 0x20, 0x74, - 0x04, 0xee, 0x8f, 0x37, 0x1c, 0x4e, 0x54, 0x88, 0x1a, 0xb7, 0x93, 0x0e, 0x9a, 0x87, 0x87, 0x68, - 0x47, 0xc4, 0x1d, 0x71, 0xc3, 0x61, 0x4e, 0x48, 0x04, 0x61, 0xbc, 0x3e, 0xaa, 0x3e, 0xe8, 0x79, - 0x8e, 0xaf, 0xc1, 0x63, 0x7a, 0x45, 0x1d, 0x1e, 0x93, 0xa8, 0xbd, 0x73, 0x87, 0xdd, 0x2b, 0xc8, - 0xd3, 0xa4, 0xde, 0xce, 0xe5, 0xa9, 0xc3, 0x03, 0x31, 0x6d, 0x5f, 0x97, 0x83, 0x12, 0x51, 0xb2, - 0x2e, 0x7a, 0x0e, 0xc2, 0x80, 0x7a, 0x59, 0x58, 0xd9, 0xa7, 0xc2, 0xca, 0xa9, 0x42, 0x58, 0x31, - 0x65, 0xf2, 0x92, 0x41, 0xe4, 0x06, 0x6d, 0x37, 0xf5, 0x87, 0x76, 0x61, 0x90, 0xc4, 0xf1, 0x18, - 0x89, 0x53, 0xc9, 0x54, 0x5b, 0x1e, 0x7a, 0x9e, 0xb9, 0x21, 0x51, 0x4a, 0xf7, 0xe5, 0xa1, 0xd7, - 0xc7, 0x69, 0x85, 0x04, 0x64, 0x17, 0x5b, 0x5a, 0xa6, 0x96, 0xb6, 0x9a, 0xa2, 0x1c, 0xb9, 0x2b, - 0xa6, 0x96, 0x95, 0xe2, 0x50, 0xbb, 0x3c, 0x13, 0xae, 0xe7, 0x8e, 0xcc, 0x28, 0x79, 0x4c, 0x23, - 0x4e, 0xf0, 0x67, 0x72, 0x01, 0x8e, 0x70, 0x37, 0xb2, 0xf7, 0xfc, 0x21, 0x8c, 0xe1, 0x1f, 0x16, - 0xf6, 0x8e, 0x82, 0xbd, 0xda, 0x25, 0x91, 0x92, 0x58, 0x6c, 0xc5, 0x5a, 0x62, 0xd9, 0x46, 0x6b, - 0x70, 0x94, 0xae, 0xdd, 0x24, 0xae, 0xd8, 0x83, 0x6a, 0x22, 0x9d, 0x19, 0xbf, 0x2f, 0x71, 0x34, - 0xc6, 0x7f, 0x28, 0x18, 0x7e, 0x16, 0x8e, 0x35, 0xa9, 0x77, 0x35, 0x12, 
0x6c, 0x4b, 0x9e, 0x0b, - 0x97, 0x46, 0x82, 0x44, 0x22, 0x35, 0x9e, 0x75, 0x8b, 0x27, 0xa6, 0x56, 0x3a, 0x31, 0xf8, 0x93, - 0x52, 0xfe, 0x8e, 0xc4, 0x43, 0x55, 0xb3, 0xe1, 0x3f, 0x0b, 0x87, 0xab, 0x55, 0xca, 0xdc, 0x83, - 0xf9, 0x30, 0x9c, 0x64, 0x84, 0xd3, 0x0e, 0x73, 0xc9, 0x8b, 0x7e, 0xd4, 0x4e, 0x17, 0x5d, 0x7a, - 0x56, 0xfc, 0xa6, 0x10, 0x4a, 0x4a, 0xcf, 0x10, 0x83, 0x53, 0x49, 0xc1, 0x50, 0x0e, 0x29, 0xcd, - 0xdd, 0x2f, 0xb6, 0x95, 0x4d, 0xcb, 0xed, 0xb2, 0x89, 0xa5, 0xdf, 0x8e, 0xc2, 0x83, 0x79, 0x16, - 0x61, 0x5d, 0xdf, 0x25, 0xe8, 0x4b, 0x00, 0xa7, 0x93, 0xca, 0x31, 0x7b, 0x83, 0x4e, 0xe6, 0x93, - 0xf6, 0xad, 0xba, 0x8d, 0x21, 0x7a, 0x04, 0xcf, 0xbd, 0x77, 0xef, 0x8f, 0x8f, 0x6b, 0x18, 0x9f, - 0x50, 0x37, 0x80, 0x6e, 0xc3, 0xca, 0x6f, 0x11, 0xb7, 0xb5, 0xea, 0x77, 0x9e, 0x06, 0xf3, 0xe8, - 0x0b, 0x00, 0x27, 0x56, 0x89, 0xd0, 0x98, 0xc7, 0x7b, 0x31, 0xf3, 0xca, 0x76, 0xa8, 0x8c, 0xe7, - 0x15, 0xe3, 0x63, 0xe8, 0xd1, 0x81, 0x8c, 0x49, 0xfb, 0x8e, 0xe4, 0x9c, 0x92, 0x87, 0x4a, 0x07, - 0x3d, 0x74, 0xa2, 0x97, 0xb4, 0x50, 0xd0, 0x1a, 0xd7, 0x87, 0x87, 0x2a, 0xa7, 0xc5, 0x67, 0x14, - 0xee, 0x49, 0x34, 0x58, 0x52, 0xf4, 0x0e, 0x9c, 0x2e, 0x07, 0xe7, 0x92, 0xe3, 0xfb, 0x85, 0x6d, - 0xa3, 0x8f, 0xe4, 0x79, 0xac, 0xc2, 0xe7, 0x94, 0xdd, 0x33, 0xe8, 0xf4, 0x76, 0xbb, 0x0b, 0x44, - 0xc5, 0xb2, 0xa2, 0xf5, 0x45, 0x80, 0x38, 0x9c, 0x28, 0x04, 0xba, 0x92, 0x3b, 0x7b, 0xe2, 0x9f, - 0xf1, 0x48, 0xbf, 0x54, 0x9b, 0x98, 0x3d, 0xab, 0xcc, 0x9e, 0x46, 0xa7, 0x32, 0xb3, 0x5c, 0x30, - 0xe2, 0x84, 0x56, 0x5f, 0xa3, 0xef, 0x02, 0x38, 0x9d, 0x64, 0xa9, 0x41, 0xdb, 0xbd, 0x94, 0x6d, - 0x8d, 0xd9, 0x07, 0x7f, 0x90, 0x26, 0xba, 0x74, 0x83, 0xcc, 0x57, 0xdb, 0x20, 0xdf, 0x03, 0x38, - 0xa5, 0x8a, 0x78, 0x8d, 0x30, 0xd3, 0x6b, 0xa1, 0x58, 0xe5, 0x0f, 0x75, 0x33, 0x3f, 0xa1, 0x58, - 0x2d, 0x63, 0xbe, 0x0a, 0xab, 0xc5, 0x24, 0x86, 0x3c, 0x7d, 0x3f, 0x01, 0x78, 0x28, 0xbb, 0xe3, - 0x68, 0xee, 0x53, 0xfd, 0xb8, 0x4b, 0xf7, 0xa0, 0xa1, 0xa2, 0x5f, 0x54, 0xe8, 0x4b, 0xc6, 0x42, - 0x45, 0xf4, 
0x84, 0x44, 0xd2, 0xff, 0x00, 0xe0, 0x74, 0x72, 0x23, 0x19, 0xe4, 0xf6, 0xd2, 0x9d, - 0x65, 0xa8, 0xe4, 0x4f, 0x2a, 0xf2, 0x45, 0xe3, 0x5c, 0x65, 0xf2, 0x90, 0x48, 0xee, 0x1f, 0x01, - 0x3c, 0x98, 0x56, 0xc7, 0x1a, 0xbc, 0xcf, 0x76, 0x2c, 0x17, 0xd0, 0x43, 0x25, 0x7f, 0x4a, 0x91, - 0x37, 0x8c, 0xf3, 0x95, 0xc8, 0x79, 0x02, 0x22, 0xd1, 0x7f, 0x06, 0xf0, 0xb0, 0xbe, 0x8b, 0x69, - 0x78, 0xdc, 0x0b, 0xbf, 0xfd, 0xc2, 0x36, 0x54, 0xfc, 0x4b, 0x0a, 0x7f, 0xd9, 0x30, 0x2b, 0xe1, - 0x8b, 0x0c, 0x45, 0x2e, 0xe0, 0x5b, 0x00, 0x27, 0xe5, 0xed, 0x4f, 0xb3, 0xf7, 0x09, 0xe3, 0x85, - 0xdb, 0xe1, 0x50, 0xb1, 0x2f, 0x28, 0x6c, 0xd3, 0x38, 0x5b, 0x4d, 0x75, 0x41, 0x63, 0x49, 0xfc, - 0x35, 0x80, 0x13, 0xad, 0xc1, 0x19, 0xb2, 0xb5, 0x37, 0x19, 0x72, 0x59, 0xf1, 0x2e, 0x18, 0x73, - 0xd5, 0x78, 0x89, 0x3a, 0x94, 0x5f, 0x01, 0x38, 0x29, 0x0b, 0xc3, 0x41, 0x02, 0x17, 0x0a, 0xc7, - 0xa1, 0x02, 0x2f, 0x28, 0xe0, 0xc7, 0x31, 0x1e, 0x0c, 0x1c, 0xf8, 0x91, 0x42, 0x7d, 0x1b, 0x1e, - 0x48, 0xee, 0x75, 0xbc, 0x9f, 0xa8, 0xf9, 0x95, 0xd3, 0x40, 0xf9, 0xdb, 0xac, 0x78, 0xc6, 0xcf, - 0x28, 0x5b, 0x17, 0xd0, 0x52, 0x25, 0x71, 0x6e, 0xa7, 0xf5, 0xf3, 0x1d, 0x2b, 0xa0, 0xde, 0x07, - 0x35, 0xb0, 0x08, 0x90, 0x80, 0x93, 0x05, 0x53, 0x3b, 0x41, 0x58, 0x54, 0x08, 0xf3, 0xa8, 0x9a, - 0x7f, 0x02, 0xea, 0x2d, 0x02, 0xf4, 0x0d, 0x80, 0xd3, 0xad, 0x72, 0xbc, 0x3f, 0xd9, 0x2f, 0xf4, - 0xec, 0x55, 0xb4, 0xb7, 0x14, 0xf3, 0x59, 0xfc, 0x37, 0x49, 0x55, 0x07, 0xf9, 0xcb, 0xab, 0xbf, - 0xde, 0x9f, 0x01, 0x77, 0xef, 0xcf, 0x80, 0xdf, 0xef, 0xcf, 0x80, 0xd7, 0x2f, 0x55, 0xff, 0x4f, - 0xbd, 0xed, 0x7f, 0xfa, 0xda, 0xa8, 0xfa, 0xed, 0xbc, 0xfc, 0x57, 0x00, 0x00, 0x00, 0xff, 0xff, - 0xd5, 0x18, 0x9f, 0xd1, 0x70, 0x17, 0x00, 0x00, + // 1442 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x98, 0xcd, 0x6f, 0x1c, 0xb5, + 0x1b, 0xc7, 0xe5, 0xa4, 0x4d, 0x13, 0xe7, 0xa5, 0xad, 0x7f, 0x6d, 0x7f, 0xcb, 0xa8, 0x4d, 0x53, + 0x97, 0x42, 0x9a, 0x36, 0x33, 0x79, 0x29, 0xd0, 
0x22, 0x81, 0x44, 0x9b, 0x12, 0x51, 0x96, 0x52, + 0xcd, 0x22, 0x21, 0xb8, 0xa0, 0xc9, 0xec, 0x93, 0xc9, 0x34, 0x3b, 0xe3, 0xc1, 0xf6, 0x6e, 0x15, + 0x4a, 0x91, 0xe0, 0x02, 0x07, 0x24, 0x0e, 0x1c, 0xb9, 0x20, 0x24, 0x04, 0x07, 0x04, 0x08, 0x09, + 0x09, 0x81, 0x84, 0x38, 0x70, 0xe0, 0x58, 0xa9, 0x57, 0x0e, 0xa8, 0xe2, 0x1f, 0xe0, 0x3f, 0x40, + 0xf6, 0xbc, 0x67, 0xb7, 0xdb, 0x21, 0xd9, 0x42, 0x6f, 0x63, 0xcf, 0xd8, 0xcf, 0xc7, 0xdf, 0xc7, + 0x7e, 0x9e, 0xc7, 0x83, 0x4f, 0x45, 0x9b, 0x9e, 0xe5, 0x44, 0xbe, 0xdb, 0xf2, 0x21, 0x94, 0xd6, + 0x0d, 0xc6, 0x37, 0xd7, 0x5b, 0xec, 0x46, 0xf6, 0x60, 0x46, 0x9c, 0x49, 0x46, 0x46, 0xd3, 0xb6, + 0x71, 0xd4, 0x63, 0xcc, 0x6b, 0x81, 0x1a, 0x63, 0x39, 0x61, 0xc8, 0xa4, 0x23, 0x7d, 0x16, 0x8a, + 0xf8, 0x3b, 0xe3, 0xdc, 0xe6, 0x79, 0x61, 0xfa, 0x4c, 0xbd, 0x0d, 0x1c, 0x77, 0xc3, 0x0f, 0x81, + 0x6f, 0x59, 0x89, 0x09, 0x61, 0x05, 0x20, 0x1d, 0xab, 0xb3, 0x68, 0x79, 0x10, 0x02, 0x77, 0x24, + 0x34, 0x93, 0x51, 0x2f, 0x79, 0xbe, 0xdc, 0x68, 0xaf, 0x99, 0x2e, 0x0b, 0x2c, 0x87, 0x7b, 0x2c, + 0xe2, 0xec, 0xba, 0x7e, 0x98, 0x4f, 0xcd, 0x8a, 0x7c, 0x92, 0x0c, 0xb1, 0xb3, 0xe8, 0xb4, 0xa2, + 0x0d, 0xa7, 0x7b, 0x3a, 0x9a, 0x43, 0x58, 0x2e, 0xe3, 0xd0, 0xc3, 0x24, 0xfd, 0x65, 0x08, 0x1f, + 0x7e, 0x35, 0x99, 0xe9, 0x12, 0x07, 0x47, 0x82, 0x0d, 0x6f, 0xb6, 0x41, 0x48, 0x72, 0x14, 0x8f, + 0x85, 0x4e, 0x00, 0x22, 0x72, 0x5c, 0xa8, 0xa1, 0x19, 0x34, 0x3b, 0x66, 0xe7, 0x1d, 0x64, 0x1d, + 0x67, 0x52, 0xd4, 0x86, 0x66, 0xd0, 0xec, 0xf8, 0xd2, 0x15, 0x33, 0xa7, 0x37, 0x53, 0x7a, 0xfd, + 0xf0, 0x46, 0x46, 0x6f, 0x76, 0x96, 0xcd, 0x68, 0xd3, 0x33, 0xd5, 0x02, 0xcc, 0x4c, 0xda, 0x74, + 0x01, 0x66, 0x0a, 0x62, 0x67, 0x73, 0x13, 0x8a, 0xb1, 0x1f, 0x0a, 0xe9, 0x84, 0x2e, 0xbc, 0xb0, + 0x52, 0x1b, 0x56, 0x18, 0x17, 0x87, 0x6a, 0xc8, 0x2e, 0xf4, 0x12, 0x8a, 0x27, 0x04, 0xf0, 0x0e, + 0xf0, 0x15, 0xbe, 0x65, 0xb7, 0xc3, 0xda, 0x9e, 0x19, 0x34, 0x3b, 0x6a, 0x97, 0xfa, 0xc8, 0x6b, + 0x78, 0xd2, 0xd5, 0xcb, 0x7b, 0x39, 0xd2, 0x7e, 0xaa, 0xed, 0xd5, 0xd0, 0xcb, 0x66, 
0xac, 0x91, + 0x59, 0x74, 0x54, 0x8e, 0xa8, 0x1c, 0x65, 0x76, 0x16, 0xcd, 0x4b, 0xc5, 0xa1, 0x76, 0x79, 0x26, + 0xfa, 0x2d, 0xc2, 0x24, 0x25, 0x5f, 0x05, 0x99, 0xea, 0x47, 0xf0, 0x1e, 0x25, 0x57, 0x22, 0x9d, + 0x7e, 0x2e, 0x6b, 0x3a, 0xb4, 0x5d, 0xd3, 0x6b, 0x18, 0x7b, 0x20, 0x53, 0xc0, 0x61, 0x0d, 0xb8, + 0x50, 0x0d, 0x70, 0x35, 0x1b, 0x67, 0x17, 0xe6, 0x20, 0x47, 0xf0, 0xc8, 0xba, 0x0f, 0xad, 0xa6, + 0xd0, 0x9a, 0x8c, 0xd9, 0x49, 0x8b, 0x7e, 0x8a, 0xf0, 0xff, 0x52, 0xe4, 0xba, 0x2f, 0x64, 0x35, + 0x9f, 0x37, 0xf0, 0x78, 0xcb, 0x17, 0x19, 0x60, 0xec, 0xf6, 0xc5, 0x6a, 0x80, 0xf5, 0x7c, 0xa0, + 0x5d, 0x9c, 0xa5, 0x80, 0x38, 0x5c, 0x42, 0x7c, 0x1f, 0xe1, 0xff, 0x67, 0xfb, 0x01, 0x44, 0x7b, + 0x2d, 0xf0, 0x77, 0x21, 0xad, 0x81, 0x47, 0x03, 0x08, 0x98, 0xff, 0x16, 0x34, 0xb5, 0x9d, 0x51, + 0x3b, 0x6b, 0x93, 0x69, 0x8c, 0x23, 0x87, 0x3b, 0x01, 0x48, 0xe0, 0x6a, 0x5f, 0x0c, 0xcf, 0x8e, + 0xd9, 0x85, 0x1e, 0xfa, 0x2b, 0xc2, 0x87, 0x72, 0x12, 0xc9, 0xb7, 0x76, 0x8e, 0x71, 0x16, 0x1f, + 0xe4, 0x20, 0xa4, 0xc3, 0x65, 0xa3, 0xed, 0xba, 0x20, 0xc4, 0x7a, 0xbb, 0x95, 0xf0, 0x74, 0xbf, + 0x50, 0x5f, 0x87, 0xac, 0x09, 0xcf, 0x2b, 0x41, 0x1a, 0xd0, 0x02, 0x57, 0x32, 0x9e, 0x38, 0xb2, + 0xfb, 0xc5, 0x7d, 0x97, 0x71, 0x23, 0x3f, 0xe8, 0x4a, 0xcf, 0x00, 0x76, 0xb5, 0x8c, 0x6e, 0xb0, + 0xe1, 0x7b, 0x80, 0xd1, 0x3a, 0xae, 0xa5, 0x86, 0x5f, 0x01, 0x1e, 0xf8, 0x61, 0x21, 0xc8, 0xfc, + 0x63, 0xdb, 0xf4, 0xa3, 0xc2, 0xd6, 0x6d, 0x48, 0x16, 0xfd, 0x4b, 0xab, 0x20, 0x35, 0xbc, 0x2f, + 0x00, 0x21, 0x1c, 0x0f, 0x12, 0x17, 0xa4, 0x4d, 0x7a, 0xbb, 0x70, 0xfe, 0x1b, 0xbb, 0x39, 0xff, + 0x03, 0x02, 0x22, 0x87, 0xf0, 0xde, 0x68, 0xc3, 0x11, 0xa0, 0x63, 0xdc, 0x98, 0x1d, 0x37, 0xc8, + 0x1c, 0x3e, 0xc0, 0xda, 0x32, 0x6a, 0xcb, 0x6b, 0xf9, 0x2e, 0x19, 0xd1, 0x1f, 0x74, 0xf5, 0xd3, + 0x2b, 0xf8, 0x48, 0xb6, 0xa2, 0xb6, 0x88, 0x20, 0x6c, 0xee, 0xdc, 0x61, 0x77, 0x0a, 0xf2, 0xd4, + 0x99, 0xb7, 0x73, 0x79, 0x6a, 0x78, 0x5f, 0xc4, 0x9a, 0x57, 0xd5, 0xa0, 0x58, 0x94, 0xb4, 0x49, + 0x9e, 0xc3, 0xb8, 0xc5, 
0xbc, 0x34, 0x2e, 0xed, 0xd1, 0x71, 0xe9, 0x44, 0x21, 0x2e, 0x99, 0x2a, + 0xfb, 0xa9, 0x28, 0x74, 0x8d, 0x35, 0xeb, 0xd9, 0x87, 0x76, 0x61, 0x90, 0xc2, 0xf1, 0x38, 0x44, + 0x89, 0x64, 0xfa, 0x59, 0x05, 0x0d, 0x91, 0xba, 0x21, 0x56, 0x2a, 0x6b, 0xd3, 0x1f, 0x51, 0x7e, + 0x9c, 0x56, 0xa0, 0x05, 0xbb, 0xd8, 0xd2, 0x2a, 0x37, 0x35, 0xf5, 0x14, 0xe5, 0xd0, 0x5f, 0x31, + 0x37, 0xad, 0x14, 0x87, 0xda, 0xe5, 0x99, 0xd4, 0x56, 0x58, 0x67, 0xdc, 0x85, 0x24, 0x27, 0xc6, + 0x0d, 0x5a, 0xcb, 0xdd, 0x9b, 0xb2, 0x8b, 0x88, 0x85, 0x02, 0xe8, 0x67, 0x6a, 0x59, 0x8e, 0x74, + 0x37, 0xd2, 0xf7, 0xe2, 0x21, 0x4c, 0x0d, 0x1f, 0x16, 0x76, 0x94, 0x86, 0xbd, 0xdc, 0x81, 0x50, + 0x0b, 0x2f, 0xb7, 0xa2, 0x4c, 0x78, 0xf5, 0x4c, 0xd6, 0xf0, 0x08, 0x5b, 0xbb, 0x0e, 0xae, 0x7c, + 0x00, 0x45, 0x4a, 0x32, 0xb3, 0xca, 0x54, 0x24, 0xc7, 0xf8, 0x0f, 0x05, 0xa3, 0xcf, 0xe2, 0xd1, + 0x3a, 0xf3, 0x2e, 0x87, 0x92, 0x6f, 0xa9, 0xd3, 0xe2, 0xb2, 0x50, 0x42, 0x28, 0x13, 0xe3, 0x69, + 0xb3, 0x78, 0x8e, 0x86, 0x4a, 0xe7, 0x88, 0x7e, 0x52, 0x2a, 0x0b, 0x42, 0xf9, 0x50, 0x95, 0x82, + 0xf4, 0xaf, 0xc2, 0x91, 0x6b, 0x94, 0xea, 0x81, 0xfe, 0x7c, 0x14, 0x4f, 0x70, 0x10, 0xac, 0xcd, + 0x5d, 0x78, 0xd1, 0x0f, 0x9b, 0xc9, 0xa2, 0x4b, 0x7d, 0xc5, 0x6f, 0x0a, 0x01, 0xa6, 0xd4, 0x47, + 0x38, 0x9e, 0x8c, 0xcb, 0x90, 0x72, 0xa0, 0xa9, 0xef, 0x7e, 0xb1, 0x8d, 0x74, 0x5a, 0x61, 0x97, + 0x4d, 0x2c, 0xfd, 0x7e, 0x18, 0xef, 0xcf, 0x73, 0x0b, 0xef, 0xf8, 0x2e, 0x90, 0x2f, 0x10, 0x9e, + 0x8a, 0x0b, 0xd2, 0xf4, 0x0d, 0x39, 0x9e, 0x4f, 0xda, 0xb3, 0x98, 0x37, 0x06, 0xe8, 0x11, 0x3a, + 0xfb, 0xde, 0x9d, 0x3f, 0x3f, 0x1e, 0xa2, 0xf4, 0x98, 0xbe, 0x58, 0x74, 0x16, 0xad, 0xfc, 0x72, + 0x72, 0x33, 0x53, 0xfd, 0xd6, 0xd3, 0x68, 0x8e, 0x7c, 0x8e, 0xf0, 0xf8, 0x2a, 0xc8, 0x0c, 0xf3, + 0x68, 0x37, 0x66, 0x5e, 0x30, 0x0f, 0x94, 0xf1, 0xac, 0x66, 0x7c, 0x8c, 0x3c, 0xda, 0x97, 0x31, + 0x7e, 0xbe, 0xa5, 0x38, 0x27, 0xd5, 0xa1, 0xca, 0x82, 0x1e, 0x39, 0xd6, 0x4d, 0x5a, 0xa8, 0x93, + 0x8d, 0xab, 0x83, 0x43, 0x55, 0xd3, 0xd2, 0x53, 0x1a, 0xf7, 
0x38, 0xe9, 0x2f, 0x29, 0x79, 0x07, + 0x4f, 0x95, 0x83, 0x73, 0xc9, 0xf1, 0xbd, 0xc2, 0xb6, 0xd1, 0x43, 0xf2, 0x3c, 0x56, 0xd1, 0x33, + 0xda, 0xee, 0x29, 0x72, 0x72, 0xbb, 0xdd, 0x79, 0xd0, 0xb1, 0xac, 0x68, 0x7d, 0x01, 0x11, 0x81, + 0xc7, 0x0b, 0x81, 0xae, 0xe4, 0xce, 0xae, 0xf8, 0x67, 0x3c, 0xd2, 0x2b, 0x01, 0xc7, 0x66, 0x4f, + 0x6b, 0xb3, 0x27, 0xc9, 0x89, 0xd4, 0xac, 0x90, 0x1c, 0x9c, 0xc0, 0xea, 0x69, 0xf4, 0x5d, 0x84, + 0xa7, 0xe2, 0x2c, 0xd5, 0x6f, 0xbb, 0x97, 0x72, 0xb0, 0x31, 0x73, 0xef, 0x0f, 0x92, 0x44, 0x97, + 0x6c, 0x90, 0xb9, 0x6a, 0x1b, 0xe4, 0x3b, 0x84, 0x27, 0x75, 0xe9, 0x9f, 0x21, 0x4c, 0x77, 0x5b, + 0x28, 0xde, 0x0d, 0x06, 0xba, 0x99, 0x9f, 0xd0, 0xac, 0x96, 0x31, 0x57, 0x85, 0xd5, 0xe2, 0x0a, + 0x43, 0x9d, 0xbe, 0x9f, 0x10, 0x3e, 0x90, 0xde, 0x9c, 0x32, 0xee, 0x13, 0xbd, 0xb8, 0x4b, 0xb7, + 0xab, 0x81, 0xa2, 0x9f, 0xd7, 0xe8, 0x4b, 0xc6, 0x7c, 0x45, 0xf4, 0x98, 0x44, 0xd1, 0x7f, 0x8f, + 0xf0, 0x54, 0x7c, 0x4f, 0xe9, 0xe7, 0xf6, 0xd2, 0x4d, 0x66, 0xa0, 0xe4, 0x4f, 0x6a, 0xf2, 0x05, + 0xe3, 0x4c, 0x65, 0xf2, 0x00, 0x14, 0xf7, 0x0f, 0x08, 0xef, 0x4f, 0x6a, 0xe6, 0x0c, 0xbc, 0xc7, + 0x76, 0x2c, 0x97, 0xd5, 0x03, 0x25, 0x7f, 0x4a, 0x93, 0x2f, 0x1a, 0x67, 0x2b, 0x91, 0x8b, 0x18, + 0x44, 0xa1, 0xff, 0x8c, 0xf0, 0xc1, 0xec, 0x86, 0x96, 0xc1, 0xd3, 0x6e, 0xf8, 0xed, 0xd7, 0xb8, + 0x81, 0xe2, 0x5f, 0xd0, 0xf8, 0xcb, 0x86, 0x59, 0x09, 0x5f, 0xa6, 0x28, 0x6a, 0x01, 0xdf, 0x20, + 0x3c, 0xa1, 0xee, 0x84, 0x19, 0x7b, 0x8f, 0x30, 0x5e, 0xb8, 0x33, 0x0e, 0x14, 0xfb, 0x9c, 0xc6, + 0x36, 0x8d, 0xd3, 0xd5, 0x54, 0x97, 0x2c, 0x52, 0xc4, 0x5f, 0x21, 0x3c, 0xde, 0xe8, 0x9f, 0x21, + 0x1b, 0x0f, 0x26, 0x43, 0x2e, 0x6b, 0xde, 0x79, 0x63, 0xb6, 0x1a, 0x2f, 0xe8, 0x43, 0xf9, 0x25, + 0xc2, 0x13, 0xaa, 0x30, 0xec, 0x27, 0x70, 0xa1, 0x70, 0x1c, 0x28, 0xf0, 0xbc, 0x06, 0x7e, 0x9c, + 0xd2, 0xfe, 0xc0, 0x2d, 0x3f, 0xd4, 0xa8, 0x6f, 0xe3, 0x7d, 0xf1, 0x6d, 0x4f, 0xf4, 0x12, 0x35, + 0xbf, 0x88, 0x1a, 0x24, 0x7f, 0x9b, 0x16, 0xcf, 0xf4, 0x19, 0x6d, 0xeb, 0x1c, 0x59, 0xaa, 0x24, + 
0xce, 0xcd, 0xa4, 0x7e, 0xbe, 0x65, 0xb5, 0x98, 0xf7, 0xc1, 0x10, 0x5a, 0x40, 0x44, 0xe2, 0x89, + 0x82, 0xa9, 0x9d, 0x20, 0x2c, 0x68, 0x84, 0x39, 0x52, 0xcd, 0x3f, 0x2d, 0xe6, 0x2d, 0x20, 0xf2, + 0x35, 0xc2, 0x53, 0x8d, 0x72, 0xbc, 0x3f, 0xde, 0x2b, 0xf4, 0x3c, 0xa8, 0x68, 0x6f, 0x69, 0xe6, + 0xd3, 0xf4, 0x3e, 0x49, 0x35, 0x0b, 0xf2, 0x17, 0x57, 0x7f, 0xbb, 0x3b, 0x8d, 0x6e, 0xdf, 0x9d, + 0x46, 0x7f, 0xdc, 0x9d, 0x46, 0xaf, 0x5f, 0xa8, 0xfe, 0xfb, 0x7b, 0xdb, 0x6f, 0xfa, 0xb5, 0x11, + 0xfd, 0x37, 0x7b, 0xf9, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x6d, 0x5c, 0x66, 0x41, 0xc7, 0x17, + 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -2360,6 +2386,15 @@ func (m *WorkflowResubmitRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if len(m.Parameters) > 0 { + for iNdEx := len(m.Parameters) - 1; iNdEx >= 0; iNdEx-- { + i -= len(m.Parameters[iNdEx]) + copy(dAtA[i:], m.Parameters[iNdEx]) + i = encodeVarintWorkflow(dAtA, i, uint64(len(m.Parameters[iNdEx]))) + i-- + dAtA[i] = 0x2a + } + } if m.Memoized { i-- if m.Memoized { @@ -2411,6 +2446,15 @@ func (m *WorkflowRetryRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if len(m.Parameters) > 0 { + for iNdEx := len(m.Parameters) - 1; iNdEx >= 0; iNdEx-- { + i -= len(m.Parameters[iNdEx]) + copy(dAtA[i:], m.Parameters[iNdEx]) + i = encodeVarintWorkflow(dAtA, i, uint64(len(m.Parameters[iNdEx]))) + i-- + dAtA[i] = 0x2a + } + } if len(m.NodeFieldSelector) > 0 { i -= len(m.NodeFieldSelector) copy(dAtA[i:], m.NodeFieldSelector) @@ -2797,6 +2841,16 @@ func (m *WorkflowDeleteRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if m.Force { + i-- + if m.Force { + dAtA[i] = 1 + } else { + dAtA[i] = 0 + } + i-- + dAtA[i] = 0x20 + } if m.DeleteOptions != nil { { size, err := 
m.DeleteOptions.MarshalToSizedBuffer(dAtA[:i]) @@ -3256,6 +3310,12 @@ func (m *WorkflowResubmitRequest) Size() (n int) { if m.Memoized { n += 2 } + if len(m.Parameters) > 0 { + for _, s := range m.Parameters { + l = len(s) + n += 1 + l + sovWorkflow(uint64(l)) + } + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -3283,6 +3343,12 @@ func (m *WorkflowRetryRequest) Size() (n int) { if l > 0 { n += 1 + l + sovWorkflow(uint64(l)) } + if len(m.Parameters) > 0 { + for _, s := range m.Parameters { + l = len(s) + n += 1 + l + sovWorkflow(uint64(l)) + } + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -3471,6 +3537,9 @@ func (m *WorkflowDeleteRequest) Size() (n int) { l = m.DeleteOptions.Size() n += 1 + l + sovWorkflow(uint64(l)) } + if m.Force { + n += 2 + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -4281,6 +4350,38 @@ func (m *WorkflowResubmitRequest) Unmarshal(dAtA []byte) error { } } m.Memoized = bool(v != 0) + case 5: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Parameters", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowWorkflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthWorkflow + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthWorkflow + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Parameters = append(m.Parameters, string(dAtA[iNdEx:postIndex])) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipWorkflow(dAtA[iNdEx:]) @@ -4448,6 +4549,38 @@ func (m *WorkflowRetryRequest) Unmarshal(dAtA []byte) error { } m.NodeFieldSelector = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex + case 5: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = 
%d for field Parameters", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowWorkflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthWorkflow + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthWorkflow + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Parameters = append(m.Parameters, string(dAtA[iNdEx:postIndex])) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipWorkflow(dAtA[iNdEx:]) @@ -5645,6 +5778,26 @@ func (m *WorkflowDeleteRequest) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 4: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Force", wireType) + } + var v int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowWorkflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.Force = bool(v != 0) default: iNdEx = preIndex skippy, err := skipWorkflow(dAtA[iNdEx:]) diff --git a/pkg/apiclient/workflow/workflow.proto b/pkg/apiclient/workflow/workflow.proto index 676d5b96dd6b..edfeaf171280 100644 --- a/pkg/apiclient/workflow/workflow.proto +++ b/pkg/apiclient/workflow/workflow.proto @@ -12,225 +12,227 @@ import "k8s.io/api/core/v1/generated.proto"; package workflow; message WorkflowCreateRequest { - string namespace = 1; - github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow workflow = 2; - // This field is no longer used. 
- string instanceID = 3 [deprecated=true]; - bool serverDryRun = 4; - k8s.io.apimachinery.pkg.apis.meta.v1.CreateOptions createOptions = 5; + string namespace = 1; + github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow workflow = 2; + // This field is no longer used. + string instanceID = 3 [ deprecated = true ]; + bool serverDryRun = 4; + k8s.io.apimachinery.pkg.apis.meta.v1.CreateOptions createOptions = 5; } message WorkflowGetRequest { - string name = 1; - string namespace = 2; - k8s.io.apimachinery.pkg.apis.meta.v1.GetOptions getOptions = 3; - // Fields to be included or excluded in the response. e.g. "spec,status.phase", "-status.nodes" - string fields = 4; + string name = 1; + string namespace = 2; + k8s.io.apimachinery.pkg.apis.meta.v1.GetOptions getOptions = 3; + // Fields to be included or excluded in the response. e.g. "spec,status.phase", "-status.nodes" + string fields = 4; } message WorkflowListRequest { - string namespace = 1; - k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; - // Fields to be included or excluded in the response. e.g. "items.spec,items.status.phase", "-items.status.nodes" - string fields = 3; + string namespace = 1; + k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; + // Fields to be included or excluded in the response. e.g. 
"items.spec,items.status.phase", "-items.status.nodes" + string fields = 3; } message WorkflowResubmitRequest { - string name = 1; - string namespace = 2; - bool memoized = 3; + string name = 1; + string namespace = 2; + bool memoized = 3; + repeated string parameters = 5; } message WorkflowRetryRequest { - string name = 1; - string namespace = 2; - bool restartSuccessful = 3; - string nodeFieldSelector = 4; + string name = 1; + string namespace = 2; + bool restartSuccessful = 3; + string nodeFieldSelector = 4; + repeated string parameters = 5; } message WorkflowResumeRequest { - string name = 1; - string namespace = 2; - string nodeFieldSelector = 3; + string name = 1; + string namespace = 2; + string nodeFieldSelector = 3; } message WorkflowTerminateRequest { - string name = 1; - string namespace = 2; + string name = 1; + string namespace = 2; } message WorkflowStopRequest { - string name = 1; - string namespace = 2; - string nodeFieldSelector = 3; - string message = 4; + string name = 1; + string namespace = 2; + string nodeFieldSelector = 3; + string message = 4; } message WorkflowSetRequest { - string name = 1; - string namespace = 2; - string nodeFieldSelector = 3; - string message = 4; - string phase = 5; - string outputParameters = 6; + string name = 1; + string namespace = 2; + string nodeFieldSelector = 3; + string message = 4; + string phase = 5; + string outputParameters = 6; } message WorkflowSuspendRequest { - string name = 1; - string namespace = 2; + string name = 1; + string namespace = 2; } message WorkflowLogRequest { - string name = 1; - string namespace = 2; - string podName = 3; - k8s.io.api.core.v1.PodLogOptions logOptions = 4; - string grep = 5; - string selector = 6; + string name = 1; + string namespace = 2; + string podName = 3; + k8s.io.api.core.v1.PodLogOptions logOptions = 4; + string grep = 5; + string selector = 6; } message WorkflowDeleteRequest { - string name = 1; - string namespace = 2; - 
k8s.io.apimachinery.pkg.apis.meta.v1.DeleteOptions deleteOptions = 3; + string name = 1; + string namespace = 2; + k8s.io.apimachinery.pkg.apis.meta.v1.DeleteOptions deleteOptions = 3; + bool force = 4; } message WorkflowDeleteResponse { } message WatchWorkflowsRequest { - string namespace = 1; - k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; - string fields = 3; + string namespace = 1; + k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; + string fields = 3; } message WorkflowWatchEvent { - // the type of change - string type = 1; - // the workflow - github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow object = 2; + // the type of change + string type = 1; + // the workflow + github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow object = 2; } message WatchEventsRequest { - string namespace = 1; - k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; + string namespace = 1; + k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; } message LogEntry { - string content = 1; - string podName = 2; + string content = 1; + string podName = 2; } message WorkflowLintRequest { - string namespace = 1; - github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow workflow = 2; + string namespace = 1; + github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow workflow = 2; } - message WorkflowSubmitRequest { - string namespace = 1; - string resourceKind = 2; - string resourceName = 3; - github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.SubmitOpts submitOptions = 4; + string namespace = 1; + string resourceKind = 2; + string resourceName = 3; + github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.SubmitOpts submitOptions = 4; } service WorkflowService { - rpc CreateWorkflow (WorkflowCreateRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { - option (google.api.http) = { - post: 
"/api/v1/workflows/{namespace}" - body: "*" - }; - } - - rpc GetWorkflow (WorkflowGetRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { - option (google.api.http).get = "/api/v1/workflows/{namespace}/{name}"; - } - - rpc ListWorkflows (WorkflowListRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowList) { - option (google.api.http).get = "/api/v1/workflows/{namespace}"; - } - - rpc WatchWorkflows (WatchWorkflowsRequest) returns (stream WorkflowWatchEvent) { - option (google.api.http).get = "/api/v1/workflow-events/{namespace}"; - } - - rpc WatchEvents (WatchEventsRequest) returns (stream k8s.io.api.core.v1.Event) { - option (google.api.http).get = "/api/v1/stream/events/{namespace}"; - } - - rpc DeleteWorkflow (WorkflowDeleteRequest) returns (WorkflowDeleteResponse) { - option (google.api.http).delete = "/api/v1/workflows/{namespace}/{name}"; - } - - rpc RetryWorkflow (WorkflowRetryRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { - option (google.api.http) = { - put: "/api/v1/workflows/{namespace}/{name}/retry" - body: "*" - }; - } - - rpc ResubmitWorkflow (WorkflowResubmitRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { - option (google.api.http) = { - put: "/api/v1/workflows/{namespace}/{name}/resubmit" - body: "*" - }; - } - - rpc ResumeWorkflow (WorkflowResumeRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { - option (google.api.http) = { - put: "/api/v1/workflows/{namespace}/{name}/resume" - body: "*" - }; - } - - rpc SuspendWorkflow (WorkflowSuspendRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { - option (google.api.http) = { - put: "/api/v1/workflows/{namespace}/{name}/suspend" - body: "*" - }; - } - - rpc TerminateWorkflow (WorkflowTerminateRequest) returns 
(github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { - option (google.api.http) = { - put: "/api/v1/workflows/{namespace}/{name}/terminate" - body: "*" - }; - } - - rpc StopWorkflow (WorkflowStopRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { - option (google.api.http) = { - put: "/api/v1/workflows/{namespace}/{name}/stop" - body: "*" - }; - } - - rpc SetWorkflow (WorkflowSetRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { - option (google.api.http) = { - put: "/api/v1/workflows/{namespace}/{name}/set" - body: "*" - }; - } - - rpc LintWorkflow (WorkflowLintRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { - option (google.api.http) = { - post: "/api/v1/workflows/{namespace}/lint" - body: "*" - }; - } - - // DEPRECATED: Cannot work via HTTP if podName is an empty string. Use WorkflowLogs. - rpc PodLogs (WorkflowLogRequest) returns (stream LogEntry) { - option deprecated = true; - option (google.api.http).get = "/api/v1/workflows/{namespace}/{name}/{podName}/log"; - } - - rpc WorkflowLogs (WorkflowLogRequest) returns (stream LogEntry) { - option (google.api.http).get = "/api/v1/workflows/{namespace}/{name}/log"; - } - - rpc SubmitWorkflow (WorkflowSubmitRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { - option (google.api.http) = { - post: "/api/v1/workflows/{namespace}/submit" - body: "*" - }; - } + rpc CreateWorkflow(WorkflowCreateRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { + option (google.api.http) = { + post : "/api/v1/workflows/{namespace}" + body : "*" + }; + } + + rpc GetWorkflow(WorkflowGetRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { + option (google.api.http).get = "/api/v1/workflows/{namespace}/{name}"; + } + + rpc ListWorkflows(WorkflowListRequest) 
returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowList) { + option (google.api.http).get = "/api/v1/workflows/{namespace}"; + } + + rpc WatchWorkflows(WatchWorkflowsRequest) returns (stream WorkflowWatchEvent) { + option (google.api.http).get = "/api/v1/workflow-events/{namespace}"; + } + + rpc WatchEvents(WatchEventsRequest) returns (stream k8s.io.api.core.v1.Event) { + option (google.api.http).get = "/api/v1/stream/events/{namespace}"; + } + + rpc DeleteWorkflow(WorkflowDeleteRequest) returns (WorkflowDeleteResponse) { + option (google.api.http).delete = "/api/v1/workflows/{namespace}/{name}"; + } + + rpc RetryWorkflow(WorkflowRetryRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { + option (google.api.http) = { + put : "/api/v1/workflows/{namespace}/{name}/retry" + body : "*" + }; + } + + rpc ResubmitWorkflow(WorkflowResubmitRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { + option (google.api.http) = { + put : "/api/v1/workflows/{namespace}/{name}/resubmit" + body : "*" + }; + } + + rpc ResumeWorkflow(WorkflowResumeRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { + option (google.api.http) = { + put : "/api/v1/workflows/{namespace}/{name}/resume" + body : "*" + }; + } + + rpc SuspendWorkflow(WorkflowSuspendRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { + option (google.api.http) = { + put : "/api/v1/workflows/{namespace}/{name}/suspend" + body : "*" + }; + } + + rpc TerminateWorkflow(WorkflowTerminateRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { + option (google.api.http) = { + put : "/api/v1/workflows/{namespace}/{name}/terminate" + body : "*" + }; + } + + rpc StopWorkflow(WorkflowStopRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { + option 
(google.api.http) = { + put : "/api/v1/workflows/{namespace}/{name}/stop" + body : "*" + }; + } + + rpc SetWorkflow(WorkflowSetRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { + option (google.api.http) = { + put : "/api/v1/workflows/{namespace}/{name}/set" + body : "*" + }; + } + + rpc LintWorkflow(WorkflowLintRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { + option (google.api.http) = { + post : "/api/v1/workflows/{namespace}/lint" + body : "*" + }; + } + + // DEPRECATED: Cannot work via HTTP if podName is an empty string. Use WorkflowLogs. + rpc PodLogs(WorkflowLogRequest) returns (stream LogEntry) { + option deprecated = true; + option (google.api.http).get = "/api/v1/workflows/{namespace}/{name}/{podName}/log"; + } + + rpc WorkflowLogs(WorkflowLogRequest) returns (stream LogEntry) { + option (google.api.http).get = "/api/v1/workflows/{namespace}/{name}/log"; + } + + rpc SubmitWorkflow(WorkflowSubmitRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { + option (google.api.http) = { + post : "/api/v1/workflows/{namespace}/submit" + body : "*" + }; + } } diff --git a/pkg/apiclient/workflowarchive/workflow-archive.pb.go b/pkg/apiclient/workflowarchive/workflow-archive.pb.go index 17683c6aa0cd..ef17a5cb9d90 100644 --- a/pkg/apiclient/workflowarchive/workflow-archive.pb.go +++ b/pkg/apiclient/workflowarchive/workflow-archive.pb.go @@ -311,6 +311,7 @@ type RetryArchivedWorkflowRequest struct { Namespace string `protobuf:"bytes,3,opt,name=namespace,proto3" json:"namespace,omitempty"` RestartSuccessful bool `protobuf:"varint,4,opt,name=restartSuccessful,proto3" json:"restartSuccessful,omitempty"` NodeFieldSelector string `protobuf:"bytes,5,opt,name=nodeFieldSelector,proto3" json:"nodeFieldSelector,omitempty"` + Parameters []string `protobuf:"bytes,6,rep,name=parameters,proto3" json:"parameters,omitempty"` XXX_NoUnkeyedLiteral struct{} 
`json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -384,11 +385,19 @@ func (m *RetryArchivedWorkflowRequest) GetNodeFieldSelector() string { return "" } +func (m *RetryArchivedWorkflowRequest) GetParameters() []string { + if m != nil { + return m.Parameters + } + return nil +} + type ResubmitArchivedWorkflowRequest struct { Uid string `protobuf:"bytes,1,opt,name=uid,proto3" json:"uid,omitempty"` Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` Namespace string `protobuf:"bytes,3,opt,name=namespace,proto3" json:"namespace,omitempty"` Memoized bool `protobuf:"varint,4,opt,name=memoized,proto3" json:"memoized,omitempty"` + Parameters []string `protobuf:"bytes,5,rep,name=parameters,proto3" json:"parameters,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -455,6 +464,13 @@ func (m *ResubmitArchivedWorkflowRequest) GetMemoized() bool { return false } +func (m *ResubmitArchivedWorkflowRequest) GetParameters() []string { + if m != nil { + return m.Parameters + } + return nil +} + func init() { proto.RegisterType((*ListArchivedWorkflowsRequest)(nil), "workflowarchive.ListArchivedWorkflowsRequest") proto.RegisterType((*GetArchivedWorkflowRequest)(nil), "workflowarchive.GetArchivedWorkflowRequest") @@ -471,54 +487,56 @@ func init() { } var fileDescriptor_95ca9a2d33e8bb19 = []byte{ - // 747 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x96, 0xcd, 0x6e, 0xd3, 0x4a, - 0x14, 0xc7, 0x35, 0x6d, 0xef, 0x55, 0x3b, 0x5d, 0xdc, 0xcb, 0xa0, 0x42, 0x64, 0xa5, 0x69, 0xb0, - 0xa0, 0x4d, 0x5b, 0x32, 0x6e, 0xda, 0x22, 0x50, 0x57, 0x80, 0x10, 0x48, 0xf4, 0x03, 0xe4, 0x48, - 0x20, 0xb1, 0x41, 0x13, 0xfb, 0x34, 0x19, 0xe2, 0x78, 0x8c, 0x67, 0xec, 0x52, 0x50, 0x17, 0xc0, - 0x23, 0xf4, 0x15, 0x78, 0x08, 0xc4, 0x82, 0x1d, 0x12, 0x2b, 0x84, 0x60, 0xc7, 0x0a, 0x55, 0x3c, - 0x08, 0xb2, 0x13, 0x27, 0x25, 0x71, 
0x3e, 0x24, 0xd2, 0x55, 0xc6, 0x67, 0x8e, 0x8f, 0x7f, 0xff, - 0x93, 0xf9, 0x1f, 0x0d, 0xde, 0xf4, 0xea, 0x55, 0x83, 0x79, 0xdc, 0x72, 0x38, 0xb8, 0xca, 0x38, - 0x10, 0x7e, 0x7d, 0xdf, 0x11, 0x07, 0xcc, 0xb7, 0x6a, 0x3c, 0x84, 0xf6, 0x73, 0xb1, 0x15, 0xa0, - 0x9e, 0x2f, 0x94, 0x20, 0xff, 0x75, 0xe5, 0x69, 0xd9, 0xaa, 0x10, 0x55, 0x07, 0xa2, 0x4a, 0x06, - 0x73, 0x5d, 0xa1, 0x98, 0xe2, 0xc2, 0x95, 0xcd, 0x74, 0x6d, 0xb3, 0x7e, 0x43, 0x52, 0x2e, 0xa2, - 0xdd, 0x06, 0xb3, 0x6a, 0xdc, 0x05, 0xff, 0xd0, 0x68, 0x7d, 0x58, 0x1a, 0x0d, 0x50, 0xcc, 0x08, - 0x4b, 0x46, 0x15, 0x5c, 0xf0, 0x99, 0x02, 0xbb, 0xf5, 0xd6, 0x6e, 0x95, 0xab, 0x5a, 0x50, 0xa1, - 0x96, 0x68, 0x18, 0xcc, 0xaf, 0x0a, 0xcf, 0x17, 0xcf, 0xe2, 0x45, 0x31, 0xf9, 0xba, 0xec, 0x14, - 0x49, 0x42, 0x46, 0x58, 0x62, 0x8e, 0x57, 0x63, 0x3d, 0xe5, 0xf4, 0x63, 0x84, 0xb3, 0x3b, 0x5c, - 0xaa, 0x5b, 0x4d, 0x64, 0xfb, 0x71, 0x52, 0xc4, 0x84, 0xe7, 0x01, 0x48, 0x45, 0xca, 0x78, 0xd6, - 0xe1, 0x52, 0x3d, 0xf0, 0x62, 0xf4, 0x0c, 0xca, 0xa3, 0xc2, 0xec, 0x7a, 0x89, 0x36, 0xd9, 0xe9, - 0x69, 0x76, 0xea, 0xd5, 0xab, 0x51, 0x40, 0xd2, 0x88, 0x9d, 0x86, 0x25, 0xba, 0xd3, 0x79, 0xd1, - 0x3c, 0x5d, 0x85, 0xe4, 0x30, 0x76, 0x59, 0x03, 0x1e, 0xfa, 0xb0, 0xcf, 0x5f, 0x64, 0x26, 0xf2, - 0xa8, 0x30, 0x63, 0x9e, 0x8a, 0xe8, 0x14, 0x6b, 0xf7, 0xa0, 0x87, 0x29, 0x41, 0xfa, 0x1f, 0x4f, - 0x06, 0xdc, 0x8e, 0x51, 0x66, 0xcc, 0x68, 0xa9, 0x97, 0xf0, 0xfc, 0x1d, 0x70, 0x40, 0xc1, 0xe8, - 0xaf, 0x5c, 0xc2, 0x0b, 0xdd, 0xc9, 0xcd, 0x12, 0xb6, 0x09, 0xd2, 0x13, 0xae, 0x04, 0x7d, 0x11, - 0x5f, 0x4e, 0x6b, 0xcd, 0x0e, 0xab, 0x80, 0xb3, 0x0d, 0x87, 0x49, 0x8b, 0xf4, 0x23, 0xbc, 0xd8, - 0x37, 0xef, 0x11, 0x73, 0x02, 0x38, 0xd3, 0x66, 0xea, 0x1f, 0x11, 0xce, 0x9a, 0xa0, 0xfc, 0xc3, - 0x91, 0xc5, 0x13, 0x82, 0xa7, 0xa2, 0x6e, 0xb7, 0x3a, 0x1f, 0xaf, 0x49, 0x16, 0xcf, 0x44, 0xbf, - 0xd2, 0x63, 0x16, 0x64, 0x26, 0xe3, 0x8d, 0x4e, 0x80, 0x5c, 0xc5, 0xe7, 0x7c, 0x90, 0x8a, 0xf9, - 0xaa, 0x1c, 0x58, 0x16, 0x48, 0xb9, 0x1f, 0x38, 0x99, 0xa9, 0x3c, 0x2a, 
0x4c, 0x9b, 0xbd, 0x1b, - 0x51, 0xb6, 0x2b, 0x6c, 0xb8, 0xcb, 0xc1, 0xb1, 0xcb, 0xe0, 0x80, 0xa5, 0x84, 0x9f, 0xf9, 0x27, - 0xae, 0xd9, 0xbb, 0xa1, 0xbf, 0x46, 0x78, 0xc1, 0x04, 0x19, 0x54, 0x1a, 0x5c, 0x9d, 0xa5, 0x06, - 0x0d, 0x4f, 0x37, 0xa0, 0x21, 0xf8, 0x4b, 0xb0, 0x5b, 0xe8, 0xed, 0xe7, 0xf5, 0xb7, 0xb3, 0xf8, - 0x62, 0xf7, 0xb7, 0xcb, 0xe0, 0x87, 0xdc, 0x02, 0xf2, 0x01, 0xe1, 0xb9, 0x54, 0x8f, 0x90, 0x22, - 0xed, 0xb2, 0x3c, 0x1d, 0xe4, 0x25, 0x6d, 0x8f, 0x76, 0xcc, 0x4b, 0x13, 0xf3, 0xc6, 0x8b, 0xa7, - 0x6d, 0xf3, 0xd2, 0x70, 0xa3, 0xf3, 0xdf, 0x27, 0x51, 0x9a, 0xf8, 0x97, 0xb6, 0x0f, 0x17, 0x97, - 0x4a, 0xd7, 0xdf, 0x7c, 0xff, 0x75, 0x3c, 0x91, 0x25, 0x5a, 0x3c, 0x61, 0xc2, 0x92, 0xd1, 0xa2, - 0xb0, 0x3b, 0xb3, 0x80, 0xbc, 0x47, 0xf8, 0x7c, 0x8a, 0x97, 0xc8, 0x6a, 0x0f, 0x7a, 0x7f, 0xc7, - 0x69, 0xf7, 0xc7, 0x07, 0xae, 0x17, 0x62, 0x68, 0x9d, 0xe4, 0xfb, 0x43, 0x1b, 0xaf, 0x02, 0x6e, - 0x1f, 0x91, 0x77, 0x08, 0x5f, 0x48, 0xb7, 0x35, 0xa1, 0x3d, 0xf4, 0x03, 0xfd, 0xaf, 0xad, 0xf5, - 0xe4, 0x0f, 0x33, 0x7f, 0x0b, 0x73, 0x65, 0x38, 0xe6, 0x37, 0x84, 0xe7, 0x07, 0xce, 0x09, 0x72, - 0x6d, 0xa4, 0x63, 0xd2, 0x3d, 0x57, 0xb4, 0xed, 0xbf, 0xef, 0x7a, 0xbb, 0xa6, 0x5e, 0x8c, 0xf5, - 0x2c, 0x91, 0x2b, 0xfd, 0xf5, 0x14, 0x9d, 0x28, 0xbb, 0x58, 0x8f, 0x90, 0x7f, 0x20, 0xbc, 0x30, - 0x64, 0xa8, 0x91, 0xeb, 0xa3, 0xcb, 0xfa, 0x63, 0x0c, 0x6a, 0xbb, 0x63, 0x12, 0xd6, 0xac, 0xaa, - 0x1b, 0xb1, 0xb4, 0x65, 0xb2, 0x34, 0x54, 0x5a, 0xd8, 0x04, 0xff, 0x84, 0xf0, 0x5c, 0xea, 0xc4, - 0x4c, 0x31, 0xf4, 0xa0, 0xc9, 0x3a, 0x56, 0x5f, 0x94, 0x62, 0x15, 0xab, 0xda, 0xe2, 0xb0, 0x03, - 0x67, 0xf8, 0x11, 0xd2, 0x16, 0x5a, 0x21, 0x5f, 0x10, 0xce, 0xf4, 0x1b, 0x9c, 0x64, 0x2d, 0x45, - 0xca, 0xc0, 0x19, 0x3b, 0x56, 0x35, 0x9b, 0xb1, 0x1a, 0xaa, 0x2d, 0x8f, 0xa0, 0xa6, 0x49, 0xb5, - 0x85, 0x56, 0x6e, 0xef, 0x7d, 0x3e, 0xc9, 0xa1, 0xaf, 0x27, 0x39, 0xf4, 0xf3, 0x24, 0x87, 0x9e, - 0xdc, 0x1c, 0xfd, 0xaa, 0x93, 0x7e, 0x51, 0xab, 0xfc, 0x1b, 0x5f, 0x72, 0x36, 0x7e, 0x07, 0x00, - 0x00, 0xff, 
0xff, 0xd2, 0xb2, 0x06, 0xc2, 0xd0, 0x09, 0x00, 0x00, + // 773 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x96, 0xdd, 0x6a, 0xe3, 0x46, + 0x14, 0xc7, 0x99, 0x7c, 0x91, 0x4c, 0x2e, 0xda, 0x4e, 0x49, 0x6b, 0x84, 0xe3, 0xb8, 0xa2, 0x4d, + 0x9c, 0xa4, 0x1e, 0xc5, 0x49, 0x4a, 0x4b, 0xae, 0xda, 0x52, 0x5a, 0x68, 0x3e, 0x5a, 0x64, 0x68, + 0xa1, 0x37, 0x65, 0x2c, 0x9d, 0xd8, 0x53, 0x4b, 0x1a, 0x75, 0x66, 0xa4, 0x34, 0x2d, 0xb9, 0xe9, + 0x3e, 0x42, 0x9e, 0x60, 0x61, 0x1f, 0x62, 0xd9, 0xfb, 0x85, 0xbd, 0x5a, 0x96, 0xdd, 0xbb, 0xbd, + 0x58, 0x96, 0xb0, 0x0f, 0xb2, 0x48, 0xb6, 0xec, 0xac, 0x2d, 0x7f, 0xc0, 0x3a, 0x57, 0x1e, 0x9f, + 0x39, 0x3a, 0xfa, 0xfd, 0x0f, 0xe7, 0xfc, 0x11, 0x3e, 0x0c, 0xdb, 0x4d, 0x8b, 0x85, 0xdc, 0xf1, + 0x38, 0x04, 0xda, 0xba, 0x10, 0xb2, 0x7d, 0xee, 0x89, 0x0b, 0x26, 0x9d, 0x16, 0x8f, 0xa1, 0xf7, + 0xbf, 0xda, 0x0d, 0xd0, 0x50, 0x0a, 0x2d, 0xc8, 0x07, 0x03, 0x79, 0x46, 0xb1, 0x29, 0x44, 0xd3, + 0x83, 0xa4, 0x92, 0xc5, 0x82, 0x40, 0x68, 0xa6, 0xb9, 0x08, 0x54, 0x27, 0xdd, 0x38, 0x6c, 0x7f, + 0xa3, 0x28, 0x17, 0xc9, 0xad, 0xcf, 0x9c, 0x16, 0x0f, 0x40, 0x5e, 0x5a, 0xdd, 0x17, 0x2b, 0xcb, + 0x07, 0xcd, 0xac, 0xb8, 0x66, 0x35, 0x21, 0x00, 0xc9, 0x34, 0xb8, 0xdd, 0xa7, 0x4e, 0x9b, 0x5c, + 0xb7, 0xa2, 0x06, 0x75, 0x84, 0x6f, 0x31, 0xd9, 0x14, 0xa1, 0x14, 0x7f, 0xa5, 0x87, 0x6a, 0xf6, + 0x76, 0xd5, 0x2f, 0x92, 0x85, 0xac, 0xb8, 0xc6, 0xbc, 0xb0, 0xc5, 0x86, 0xca, 0x99, 0xd7, 0x08, + 0x17, 0x4f, 0xb8, 0xd2, 0xdf, 0x75, 0x90, 0xdd, 0xdf, 0xb3, 0x22, 0x36, 0xfc, 0x1d, 0x81, 0xd2, + 0xa4, 0x8e, 0x57, 0x3d, 0xae, 0xf4, 0x2f, 0x61, 0x8a, 0x5e, 0x40, 0x65, 0x54, 0x59, 0xdd, 0xaf, + 0xd1, 0x0e, 0x3b, 0xbd, 0xcd, 0x4e, 0xc3, 0x76, 0x33, 0x09, 0x28, 0x9a, 0xb0, 0xd3, 0xb8, 0x46, + 0x4f, 0xfa, 0x0f, 0xda, 0xb7, 0xab, 0x90, 0x12, 0xc6, 0x01, 0xf3, 0xe1, 0x57, 0x09, 0xe7, 0xfc, + 0x9f, 0xc2, 0x5c, 0x19, 0x55, 0x56, 0xec, 0x5b, 0x11, 0x93, 0x62, 0xe3, 0x27, 0x18, 0x62, 0xca, + 0x90, 0x3e, 0xc4, 0xf3, 0x11, 
0x77, 0x53, 0x94, 0x15, 0x3b, 0x39, 0x9a, 0x35, 0xbc, 0xfe, 0x03, + 0x78, 0xa0, 0x61, 0xfa, 0x47, 0x3e, 0xc3, 0x1b, 0x83, 0xc9, 0x9d, 0x12, 0xae, 0x0d, 0x2a, 0x14, + 0x81, 0x02, 0x73, 0x13, 0x7f, 0x9e, 0xd7, 0x9a, 0x13, 0xd6, 0x00, 0xef, 0x18, 0x2e, 0xb3, 0x16, + 0x99, 0x57, 0x78, 0x73, 0x64, 0xde, 0x6f, 0xcc, 0x8b, 0xe0, 0x4e, 0x9b, 0x69, 0xbe, 0x42, 0xb8, + 0x68, 0x83, 0x96, 0x97, 0x53, 0x8b, 0x27, 0x04, 0x2f, 0x24, 0xdd, 0xee, 0x76, 0x3e, 0x3d, 0x93, + 0x22, 0x5e, 0x49, 0x7e, 0x55, 0xc8, 0x1c, 0x28, 0xcc, 0xa7, 0x17, 0xfd, 0x00, 0xf9, 0x12, 0x7f, + 0x24, 0x41, 0x69, 0x26, 0x75, 0x3d, 0x72, 0x1c, 0x50, 0xea, 0x3c, 0xf2, 0x0a, 0x0b, 0x65, 0x54, + 0x59, 0xb6, 0x87, 0x2f, 0x92, 0xec, 0x40, 0xb8, 0xf0, 0x23, 0x07, 0xcf, 0xad, 0x83, 0x07, 0x8e, + 0x16, 0xb2, 0xb0, 0x98, 0xd6, 0x1c, 0xbe, 0x48, 0xa6, 0x21, 0x64, 0x92, 0xf9, 0xa0, 0x41, 0xaa, + 0xc2, 0x52, 0x79, 0x3e, 0x99, 0x86, 0x7e, 0xc4, 0xbc, 0x8f, 0xf0, 0x86, 0x0d, 0x2a, 0x6a, 0xf8, + 0x5c, 0xdf, 0xa5, 0x46, 0x03, 0x2f, 0xfb, 0xe0, 0x0b, 0xfe, 0x2f, 0xb8, 0x5d, 0x69, 0xbd, 0xff, + 0x03, 0x8c, 0x8b, 0x83, 0x8c, 0xfb, 0xf7, 0x56, 0xf1, 0xa7, 0x83, 0x6c, 0x75, 0x90, 0x31, 0x77, + 0x80, 0x3c, 0x42, 0x78, 0x2d, 0x77, 0xc7, 0x48, 0x95, 0x0e, 0x58, 0x06, 0x1d, 0xb7, 0x8b, 0xc6, + 0x19, 0xed, 0x2f, 0x3f, 0xcd, 0x96, 0x3f, 0x3d, 0xfc, 0xd9, 0x5b, 0x7e, 0x1a, 0x1f, 0xf4, 0x67, + 0x27, 0x8b, 0xd2, 0x6c, 0xff, 0x69, 0x6f, 0x38, 0xb9, 0xd2, 0xa6, 0xf9, 0xff, 0x8b, 0x37, 0xd7, + 0x73, 0x45, 0x62, 0xa4, 0x0e, 0x15, 0xd7, 0xac, 0x2e, 0x85, 0xdb, 0xf7, 0x12, 0xf2, 0x10, 0xe1, + 0x8f, 0x73, 0x76, 0x91, 0xec, 0x0e, 0xa1, 0x8f, 0xde, 0x58, 0xe3, 0xe7, 0xd9, 0x81, 0x9b, 0x95, + 0x14, 0xda, 0x24, 0xe5, 0xd1, 0xd0, 0xd6, 0x7f, 0x11, 0x77, 0xaf, 0xc8, 0x03, 0x84, 0x3f, 0xc9, + 0xb7, 0x05, 0x42, 0x87, 0xe8, 0xc7, 0xfa, 0x87, 0xb1, 0x37, 0x94, 0x3f, 0xc9, 0x3c, 0xba, 0x98, + 0x3b, 0x93, 0x31, 0x9f, 0x23, 0xbc, 0x3e, 0xd6, 0x67, 0xc8, 0x57, 0x53, 0x8d, 0xc9, 0xa0, 0x2f, + 0x19, 0xc7, 0xef, 0xdf, 0xf5, 0x5e, 0x4d, 0xb3, 0x9a, 0xea, 0xd9, 
0x22, 0x5f, 0x8c, 0xd6, 0x53, + 0xf5, 0x92, 0xec, 0x6a, 0x3b, 0x41, 0x7e, 0x89, 0xf0, 0xc6, 0x04, 0x53, 0x24, 0x5f, 0x4f, 0x2f, + 0xeb, 0x1d, 0x1b, 0x35, 0x4e, 0x67, 0x24, 0xac, 0x53, 0xd5, 0xb4, 0x52, 0x69, 0xdb, 0x64, 0x6b, + 0xa2, 0xb4, 0xb8, 0x03, 0xfe, 0x18, 0xe1, 0xb5, 0x5c, 0xc7, 0xcd, 0x59, 0xe8, 0x71, 0xce, 0x3c, + 0xd3, 0xbd, 0xa8, 0xa5, 0x2a, 0x76, 0x8d, 0xcd, 0x49, 0x03, 0x67, 0xc9, 0x04, 0xe9, 0x08, 0xed, + 0x90, 0xa7, 0x08, 0x17, 0x46, 0x19, 0x2b, 0xd9, 0xcb, 0x91, 0x32, 0xd6, 0x83, 0x67, 0xaa, 0xe6, + 0x30, 0x55, 0x43, 0x8d, 0xed, 0x29, 0xd4, 0x74, 0xa8, 0x8e, 0xd0, 0xce, 0xf7, 0x67, 0x4f, 0x6e, + 0x4a, 0xe8, 0xd9, 0x4d, 0x09, 0xbd, 0xbe, 0x29, 0xa1, 0x3f, 0xbe, 0x9d, 0xfe, 0x53, 0x29, 0xff, + 0x43, 0xaf, 0xb1, 0x94, 0x7e, 0x24, 0x1d, 0xbc, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x86, 0x56, 0x79, + 0xf8, 0x10, 0x0a, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -1048,6 +1066,15 @@ func (m *RetryArchivedWorkflowRequest) MarshalToSizedBuffer(dAtA []byte) (int, e i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if len(m.Parameters) > 0 { + for iNdEx := len(m.Parameters) - 1; iNdEx >= 0; iNdEx-- { + i -= len(m.Parameters[iNdEx]) + copy(dAtA[i:], m.Parameters[iNdEx]) + i = encodeVarintWorkflowArchive(dAtA, i, uint64(len(m.Parameters[iNdEx]))) + i-- + dAtA[i] = 0x32 + } + } if len(m.NodeFieldSelector) > 0 { i -= len(m.NodeFieldSelector) copy(dAtA[i:], m.NodeFieldSelector) @@ -1113,6 +1140,15 @@ func (m *ResubmitArchivedWorkflowRequest) MarshalToSizedBuffer(dAtA []byte) (int i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if len(m.Parameters) > 0 { + for iNdEx := len(m.Parameters) - 1; iNdEx >= 0; iNdEx-- { + i -= len(m.Parameters[iNdEx]) + copy(dAtA[i:], m.Parameters[iNdEx]) + i = encodeVarintWorkflowArchive(dAtA, i, uint64(len(m.Parameters[iNdEx]))) + i-- + dAtA[i] = 0x2a + } + } if m.Memoized { i-- if m.Memoized { @@ -1275,6 +1311,12 @@ func (m *RetryArchivedWorkflowRequest) Size() 
(n int) { if l > 0 { n += 1 + l + sovWorkflowArchive(uint64(l)) } + if len(m.Parameters) > 0 { + for _, s := range m.Parameters { + l = len(s) + n += 1 + l + sovWorkflowArchive(uint64(l)) + } + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -1302,6 +1344,12 @@ func (m *ResubmitArchivedWorkflowRequest) Size() (n int) { if m.Memoized { n += 2 } + if len(m.Parameters) > 0 { + for _, s := range m.Parameters { + l = len(s) + n += 1 + l + sovWorkflowArchive(uint64(l)) + } + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -1965,6 +2013,38 @@ func (m *RetryArchivedWorkflowRequest) Unmarshal(dAtA []byte) error { } m.NodeFieldSelector = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex + case 6: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Parameters", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowWorkflowArchive + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthWorkflowArchive + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthWorkflowArchive + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Parameters = append(m.Parameters, string(dAtA[iNdEx:postIndex])) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipWorkflowArchive(dAtA[iNdEx:]) @@ -2132,6 +2212,38 @@ func (m *ResubmitArchivedWorkflowRequest) Unmarshal(dAtA []byte) error { } } m.Memoized = bool(v != 0) + case 5: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Parameters", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowWorkflowArchive + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= 
uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthWorkflowArchive + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthWorkflowArchive + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Parameters = append(m.Parameters, string(dAtA[iNdEx:postIndex])) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipWorkflowArchive(dAtA[iNdEx:]) diff --git a/pkg/apiclient/workflowarchive/workflow-archive.proto b/pkg/apiclient/workflowarchive/workflow-archive.proto index bcaa2be78cbd..f06c6aed3cd1 100644 --- a/pkg/apiclient/workflowarchive/workflow-archive.proto +++ b/pkg/apiclient/workflowarchive/workflow-archive.proto @@ -8,63 +8,65 @@ import "github.com/argoproj/argo-workflows/pkg/apis/workflow/v1alpha1/generated. package workflowarchive; message ListArchivedWorkflowsRequest { - k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 1; - string namePrefix = 2; + k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 1; + string namePrefix = 2; } message GetArchivedWorkflowRequest { - string uid = 1; + string uid = 1; } message DeleteArchivedWorkflowRequest { - string uid = 1; + string uid = 1; } message ArchivedWorkflowDeletedResponse { } message ListArchivedWorkflowLabelKeysRequest { } message ListArchivedWorkflowLabelValuesRequest { - k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 1; + k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 1; } message RetryArchivedWorkflowRequest { - string uid = 1; - string name = 2; - string namespace = 3; - bool restartSuccessful = 4; - string nodeFieldSelector = 5; + string uid = 1; + string name = 2; + string namespace = 3; + bool restartSuccessful = 4; + string nodeFieldSelector = 5; + repeated string parameters = 6; } message ResubmitArchivedWorkflowRequest { - string uid = 1; - string name = 2; - string namespace = 3; - bool memoized = 4; + string uid 
= 1; + string name = 2; + string namespace = 3; + bool memoized = 4; + repeated string parameters = 5; } service ArchivedWorkflowService { - rpc ListArchivedWorkflows (ListArchivedWorkflowsRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowList) { - option (google.api.http).get = "/api/v1/archived-workflows"; - } - rpc GetArchivedWorkflow (GetArchivedWorkflowRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { - option (google.api.http).get = "/api/v1/archived-workflows/{uid}"; - } - rpc DeleteArchivedWorkflow (DeleteArchivedWorkflowRequest) returns (ArchivedWorkflowDeletedResponse) { - option (google.api.http).delete = "/api/v1/archived-workflows/{uid}"; - } - rpc ListArchivedWorkflowLabelKeys (ListArchivedWorkflowLabelKeysRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.LabelKeys) { - option (google.api.http).get = "/api/v1/archived-workflows-label-keys"; - } - rpc ListArchivedWorkflowLabelValues (ListArchivedWorkflowLabelValuesRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.LabelValues) { - option (google.api.http).get = "/api/v1/archived-workflows-label-values"; - } - rpc RetryArchivedWorkflow (RetryArchivedWorkflowRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { - option (google.api.http) = { - put: "/api/v1/archived-workflows/{uid}/retry" - body: "*" - }; - } - rpc ResubmitArchivedWorkflow (ResubmitArchivedWorkflowRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { - option (google.api.http) = { - put: "/api/v1/archived-workflows/{uid}/resubmit" - body: "*" - }; - } + rpc ListArchivedWorkflows(ListArchivedWorkflowsRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowList) { + option (google.api.http).get = "/api/v1/archived-workflows"; + } + rpc 
GetArchivedWorkflow(GetArchivedWorkflowRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { + option (google.api.http).get = "/api/v1/archived-workflows/{uid}"; + } + rpc DeleteArchivedWorkflow(DeleteArchivedWorkflowRequest) returns (ArchivedWorkflowDeletedResponse) { + option (google.api.http).delete = "/api/v1/archived-workflows/{uid}"; + } + rpc ListArchivedWorkflowLabelKeys(ListArchivedWorkflowLabelKeysRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.LabelKeys) { + option (google.api.http).get = "/api/v1/archived-workflows-label-keys"; + } + rpc ListArchivedWorkflowLabelValues(ListArchivedWorkflowLabelValuesRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.LabelValues) { + option (google.api.http).get = "/api/v1/archived-workflows-label-values"; + } + rpc RetryArchivedWorkflow(RetryArchivedWorkflowRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { + option (google.api.http) = { + put : "/api/v1/archived-workflows/{uid}/retry" + body : "*" + }; + } + rpc ResubmitArchivedWorkflow(ResubmitArchivedWorkflowRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow) { + option (google.api.http) = { + put : "/api/v1/archived-workflows/{uid}/resubmit" + body : "*" + }; + } } diff --git a/pkg/apiclient/workflowtemplate/mocks/WorkflowTemplateServiceClient.go b/pkg/apiclient/workflowtemplate/mocks/WorkflowTemplateServiceClient.go index f141b44d0ae8..9be32335d0a5 100644 --- a/pkg/apiclient/workflowtemplate/mocks/WorkflowTemplateServiceClient.go +++ b/pkg/apiclient/workflowtemplate/mocks/WorkflowTemplateServiceClient.go @@ -1,4 +1,4 @@ -// Code generated by mockery v1.1.1. DO NOT EDIT. +// Code generated by mockery v2.10.0. DO NOT EDIT. 
package mocks diff --git a/pkg/apiclient/workflowtemplate/workflow-template.proto b/pkg/apiclient/workflowtemplate/workflow-template.proto index f8abf53e60ce..f8301303f2ff 100644 --- a/pkg/apiclient/workflowtemplate/workflow-template.proto +++ b/pkg/apiclient/workflowtemplate/workflow-template.proto @@ -10,76 +10,74 @@ import "github.com/argoproj/argo-workflows/pkg/apis/workflow/v1alpha1/generated. // Workflow Service API performs CRUD actions against application resources package workflowtemplate; - message WorkflowTemplateCreateRequest { - string namespace = 1; - github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowTemplate template = 2; - k8s.io.apimachinery.pkg.apis.meta.v1.CreateOptions createOptions = 3; + string namespace = 1; + github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowTemplate template = 2; + k8s.io.apimachinery.pkg.apis.meta.v1.CreateOptions createOptions = 3; } message WorkflowTemplateGetRequest { - string name = 1; - string namespace = 2; - k8s.io.apimachinery.pkg.apis.meta.v1.GetOptions getOptions = 3; + string name = 1; + string namespace = 2; + k8s.io.apimachinery.pkg.apis.meta.v1.GetOptions getOptions = 3; } message WorkflowTemplateListRequest { - string namespace = 1; - k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; + string namespace = 1; + k8s.io.apimachinery.pkg.apis.meta.v1.ListOptions listOptions = 2; } message WorkflowTemplateUpdateRequest { - // DEPRECATED: This field is ignored. - string name = 1 [deprecated=true]; - string namespace = 2; - github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowTemplate template = 3; + // DEPRECATED: This field is ignored. 
+ string name = 1 [ deprecated = true ]; + string namespace = 2; + github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowTemplate template = 3; } message WorkflowTemplateDeleteRequest { - string name = 1; - string namespace = 2; - k8s.io.apimachinery.pkg.apis.meta.v1.DeleteOptions deleteOptions = 3; + string name = 1; + string namespace = 2; + k8s.io.apimachinery.pkg.apis.meta.v1.DeleteOptions deleteOptions = 3; } message WorkflowTemplateDeleteResponse { } message WorkflowTemplateLintRequest { - string namespace = 1; - github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowTemplate template = 2; - k8s.io.apimachinery.pkg.apis.meta.v1.CreateOptions createOptions = 3; + string namespace = 1; + github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowTemplate template = 2; + k8s.io.apimachinery.pkg.apis.meta.v1.CreateOptions createOptions = 3; } service WorkflowTemplateService { - rpc CreateWorkflowTemplate (WorkflowTemplateCreateRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowTemplate) { - option (google.api.http) = { - post: "/api/v1/workflow-templates/{namespace}" - body: "*" - }; - } - - rpc GetWorkflowTemplate (WorkflowTemplateGetRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowTemplate) { - option (google.api.http).get = "/api/v1/workflow-templates/{namespace}/{name}"; - } + rpc CreateWorkflowTemplate(WorkflowTemplateCreateRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowTemplate) { + option (google.api.http) = { + post : "/api/v1/workflow-templates/{namespace}" + body : "*" + }; + } - rpc ListWorkflowTemplates (WorkflowTemplateListRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowTemplateList) { - option (google.api.http).get = "/api/v1/workflow-templates/{namespace}"; - } + rpc GetWorkflowTemplate(WorkflowTemplateGetRequest) returns 
(github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowTemplate) { + option (google.api.http).get = "/api/v1/workflow-templates/{namespace}/{name}"; + } - rpc UpdateWorkflowTemplate (WorkflowTemplateUpdateRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowTemplate) { - option (google.api.http) = { - put: "/api/v1/workflow-templates/{namespace}/{name}" - body: "*" - }; - } + rpc ListWorkflowTemplates(WorkflowTemplateListRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowTemplateList) { + option (google.api.http).get = "/api/v1/workflow-templates/{namespace}"; + } - rpc DeleteWorkflowTemplate (WorkflowTemplateDeleteRequest) returns (WorkflowTemplateDeleteResponse) { - option (google.api.http).delete = "/api/v1/workflow-templates/{namespace}/{name}"; - } + rpc UpdateWorkflowTemplate(WorkflowTemplateUpdateRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowTemplate) { + option (google.api.http) = { + put : "/api/v1/workflow-templates/{namespace}/{name}" + body : "*" + }; + } - rpc LintWorkflowTemplate (WorkflowTemplateLintRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowTemplate) { - option (google.api.http) = { - post: "/api/v1/workflow-templates/{namespace}/lint" - body: "*" - }; - } + rpc DeleteWorkflowTemplate(WorkflowTemplateDeleteRequest) returns (WorkflowTemplateDeleteResponse) { + option (google.api.http).delete = "/api/v1/workflow-templates/{namespace}/{name}"; + } + rpc LintWorkflowTemplate(WorkflowTemplateLintRequest) returns (github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowTemplate) { + option (google.api.http) = { + post : "/api/v1/workflow-templates/{namespace}/lint" + body : "*" + }; + } } diff --git a/pkg/apis/api-rules/violation_exceptions.list b/pkg/apis/api-rules/violation_exceptions.list index 7ddb7513e54b..f0e0b3bf5847 100644 --- 
a/pkg/apis/api-rules/violation_exceptions.list +++ b/pkg/apis/api-rules/violation_exceptions.list @@ -9,6 +9,7 @@ API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/ API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,GitArtifact,Fetch API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,HDFSConfig,Addresses API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,HTTPArtifact,Headers +API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,HTTPBodySource,Bytes API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,Histogram,Buckets API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,Inputs,Parameters API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,LabelKeys,Items @@ -16,6 +17,8 @@ API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/ API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,Metrics,Prometheus API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,NodeStatus,Children API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,NodeStatus,OutboundNodes +API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,OAuth2Auth,EndpointParams +API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,OAuth2Auth,Scopes API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,Outputs,Parameters API rule violation: 
list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,ParallelSteps,Steps API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,Parameter,Enum @@ -38,5 +41,8 @@ API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/ API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,WorkflowSpec,Volumes API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,WorkflowStatus,PersistentVolumeClaims API rule violation: list_type_missing,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,WorkflowStep,WithItems +API rule violation: names_match,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,ArtifactSearchResult,Artifact +API rule violation: names_match,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,ArtifactSearchResult,NodeID +API rule violation: names_match,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,HTTPAuth,OAuth2 API rule violation: names_match,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,SubmitOpts,Entrypoint API rule violation: names_match,github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1,WorkflowStatus,StoredWorkflowSpec diff --git a/pkg/apis/workflow/register.go b/pkg/apis/workflow/register.go index 50e3e3b61c5e..82f124aa0d98 100644 --- a/pkg/apis/workflow/register.go +++ b/pkg/apis/workflow/register.go @@ -33,4 +33,9 @@ const ( WorkflowTaskSetShortName string = "wfts" WorkflowTaskSetFullName string = WorkflowTaskSetPlural + "." 
+ Group WorkflowTaskResultKind string = "WorkflowTaskResult" + WorkflowArtifactGCTaskKind string = "WorkflowArtifactGCTask" + WorkflowArtifactGCTaskSingular string = "workflowartifactgctask" + WorkflowArtifactGCTaskPlural string = "workflowartifactgctasks" + WorkflowArtifactGCTaskShortName string = "wfat" + WorkflowArtifactGCTaskFullName string = WorkflowArtifactGCTaskPlural + "." + Group ) diff --git a/pkg/apis/workflow/v1alpha1/artifact_gc_task_types.go b/pkg/apis/workflow/v1alpha1/artifact_gc_task_types.go new file mode 100644 index 000000000000..dbc840d58318 --- /dev/null +++ b/pkg/apis/workflow/v1alpha1/artifact_gc_task_types.go @@ -0,0 +1,63 @@ +package v1alpha1 + +import ( + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +// WorkflowArtifactGCTask specifies the Artifacts that need to be deleted as well as the status of deletion +// +genclient +// +kubebuilder:resource:shortName=wfat +// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object +// +kubebuilder:subresource:status +type WorkflowArtifactGCTask struct { + metav1.TypeMeta `json:",inline"` + metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"` + Spec ArtifactGCSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"` + Status ArtifactGCStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"` +} + +// ArtifactGCSpec specifies the Artifacts that need to be deleted +type ArtifactGCSpec struct { + // ArtifactsByNode maps Node name to information pertaining to Artifacts on that Node + ArtifactsByNode map[string]ArtifactNodeSpec `json:"artifactsByNode,omitempty" protobuf:"bytes,1,rep,name=artifactsByNode"` +} + +// ArtifactNodeSpec specifies the Artifacts that need to be deleted for a given Node +type ArtifactNodeSpec struct { + // ArchiveLocation is the template-level Artifact location specification + ArchiveLocation *ArtifactLocation `json:"archiveLocation,omitempty" protobuf:"bytes,1,opt,name=archiveLocation"` + // Artifacts maps artifact name to Artifact 
description + Artifacts map[string]Artifact `json:"artifacts,omitempty" protobuf:"bytes,2,rep,name=artifacts"` +} + +// ArtifactGCStatus describes the result of the deletion +type ArtifactGCStatus struct { + // ArtifactResultsByNode maps Node name to result + ArtifactResultsByNode map[string]ArtifactResultNodeStatus `json:"artifactResultsByNode,omitempty" protobuf:"bytes,1,rep,name=artifactResultsByNode"` +} + +// ArtifactResultNodeStatus describes the result of the deletion on a given node +type ArtifactResultNodeStatus struct { + // ArtifactResults maps Artifact name to result of the deletion + ArtifactResults map[string]ArtifactResult `json:"artifactResults,omitempty" protobuf:"bytes,1,rep,name=artifactResults"` +} + +// ArtifactResult describes the result of attempting to delete a given Artifact +type ArtifactResult struct { + // Name is the name of the Artifact + Name string `json:"name" protobuf:"bytes,1,opt,name=name"` + + // Success describes whether the deletion succeeded + Success bool `json:"success,omitempty" protobuf:"varint,2,opt,name=success"` + + // Error is an optional error message which should be set if Success==false + Error *string `json:"error,omitempty" protobuf:"bytes,3,opt,name=error"` +} + +// WorkflowArtifactGCTaskList is list of WorkflowArtifactGCTask resources +// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object +type WorkflowArtifactGCTaskList struct { + metav1.TypeMeta `json:",inline"` + metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"` + Items []WorkflowArtifactGCTask `json:"items" protobuf:"bytes,2,opt,name=items"` +} diff --git a/pkg/apis/workflow/v1alpha1/artifact_repository_types.go b/pkg/apis/workflow/v1alpha1/artifact_repository_types.go index 9f506c529e62..2e78ad5e6610 100644 --- a/pkg/apis/workflow/v1alpha1/artifact_repository_types.go +++ b/pkg/apis/workflow/v1alpha1/artifact_repository_types.go @@ -24,6 +24,8 @@ type ArtifactRepository struct { OSS *OSSArtifactRepository 
`json:"oss,omitempty" protobuf:"bytes,5,opt,name=oss"` // GCS stores artifact in a GCS object store GCS *GCSArtifactRepository `json:"gcs,omitempty" protobuf:"bytes,6,opt,name=gcs"` + // Azure stores artifact in an Azure Storage account + Azure *AzureArtifactRepository `json:"azure,omitempty" protobuf:"bytes,7,opt,name=azure"` } func (a *ArtifactRepository) IsArchiveLogs() bool { @@ -39,6 +41,8 @@ func (a *ArtifactRepository) Get() ArtifactRepositoryType { return nil } else if a.Artifactory != nil { return a.Artifactory + } else if a.Azure != nil { + return a.Azure } else if a.GCS != nil { return a.GCS } else if a.HDFS != nil { @@ -133,6 +137,22 @@ func (r *ArtifactoryArtifactRepository) IntoArtifactLocation(l *ArtifactLocation l.Artifactory = &ArtifactoryArtifact{ArtifactoryAuth: r.ArtifactoryAuth, URL: u} } +// AzureArtifactRepository defines the controller configuration for an Azure Blob Storage artifact repository +type AzureArtifactRepository struct { + AzureBlobContainer `json:",inline" protobuf:"bytes,1,opt,name=blobContainer"` + + // BlobNameFormat is defines the format of how to store blob names. 
Can reference workflow variables + BlobNameFormat string `json:"blobNameFormat,omitempty" protobuf:"bytes,2,opt,name=blobNameFormat"` +} + +func (r *AzureArtifactRepository) IntoArtifactLocation(l *ArtifactLocation) { + k := r.BlobNameFormat + if k == "" { + k = DefaultArchivePattern + } + l.Azure = &AzureArtifact{AzureBlobContainer: r.AzureBlobContainer, Blob: k} +} + // HDFSArtifactRepository defines the controller configuration for an HDFS artifact repository type HDFSArtifactRepository struct { HDFSConfig `json:",inline" protobuf:"bytes,1,opt,name=hDFSConfig"` diff --git a/pkg/apis/workflow/v1alpha1/artifact_repository_types_test.go b/pkg/apis/workflow/v1alpha1/artifact_repository_types_test.go index 48dbf9b1552c..228d750d672e 100644 --- a/pkg/apis/workflow/v1alpha1/artifact_repository_types_test.go +++ b/pkg/apis/workflow/v1alpha1/artifact_repository_types_test.go @@ -27,6 +27,14 @@ func TestArtifactRepository(t *testing.T) { assert.Equal(t, "http://my-repo/{{workflow.name}}/{{pod.name}}", l.Artifactory.URL) } }) + t.Run("Azure", func(t *testing.T) { + r := &ArtifactRepository{Azure: &AzureArtifactRepository{}} + assert.IsType(t, &AzureArtifactRepository{}, r.Get()) + l := r.ToArtifactLocation() + if assert.NotNil(t, l.Azure) { + assert.Equal(t, "{{workflow.name}}/{{pod.name}}", l.Azure.Blob) + } + }) t.Run("GCS", func(t *testing.T) { r := &ArtifactRepository{GCS: &GCSArtifactRepository{}} assert.IsType(t, &GCSArtifactRepository{}, r.Get()) diff --git a/pkg/apis/workflow/v1alpha1/common.go b/pkg/apis/workflow/v1alpha1/common.go index df2c01681040..6a7c584b4601 100644 --- a/pkg/apis/workflow/v1alpha1/common.go +++ b/pkg/apis/workflow/v1alpha1/common.go @@ -36,6 +36,12 @@ type TemplateReferenceHolder interface { GetTemplateRef() *TemplateRef // GetTemplateName returns the template name. This maybe empty. This is last precedence. GetTemplateName() string + // GetName returns the name of the template reference holder. 
+ GetName() string + // IsDAGTask returns true if the template reference is a DAGTask. + IsDAGTask() bool + // IsWorkflowStep returns true if the template reference is a WorkflowStep. + IsWorkflowStep() bool } // SubmitOpts are workflow submission options diff --git a/pkg/apis/workflow/v1alpha1/generated.pb.go b/pkg/apis/workflow/v1alpha1/generated.pb.go index 33838a61eb67..c5c1d1aaa6e3 100644 --- a/pkg/apis/workflow/v1alpha1/generated.pb.go +++ b/pkg/apis/workflow/v1alpha1/generated.pb.go @@ -120,10 +120,38 @@ func (m *Arguments) XXX_DiscardUnknown() { var xxx_messageInfo_Arguments proto.InternalMessageInfo +func (m *ArtGCStatus) Reset() { *m = ArtGCStatus{} } +func (*ArtGCStatus) ProtoMessage() {} +func (*ArtGCStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{3} +} +func (m *ArtGCStatus) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ArtGCStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *ArtGCStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArtGCStatus.Merge(m, src) +} +func (m *ArtGCStatus) XXX_Size() int { + return m.Size() +} +func (m *ArtGCStatus) XXX_DiscardUnknown() { + xxx_messageInfo_ArtGCStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_ArtGCStatus proto.InternalMessageInfo + func (m *Artifact) Reset() { *m = Artifact{} } func (*Artifact) ProtoMessage() {} func (*Artifact) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{3} + return fileDescriptor_724696e352c3df5f, []int{4} } func (m *Artifact) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -148,10 +176,94 @@ func (m *Artifact) XXX_DiscardUnknown() { var xxx_messageInfo_Artifact proto.InternalMessageInfo +func (m *ArtifactGC) Reset() { *m = ArtifactGC{} } +func (*ArtifactGC) ProtoMessage() {} +func (*ArtifactGC) Descriptor() ([]byte, []int) { + return 
fileDescriptor_724696e352c3df5f, []int{5} +} +func (m *ArtifactGC) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ArtifactGC) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *ArtifactGC) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArtifactGC.Merge(m, src) +} +func (m *ArtifactGC) XXX_Size() int { + return m.Size() +} +func (m *ArtifactGC) XXX_DiscardUnknown() { + xxx_messageInfo_ArtifactGC.DiscardUnknown(m) +} + +var xxx_messageInfo_ArtifactGC proto.InternalMessageInfo + +func (m *ArtifactGCSpec) Reset() { *m = ArtifactGCSpec{} } +func (*ArtifactGCSpec) ProtoMessage() {} +func (*ArtifactGCSpec) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{6} +} +func (m *ArtifactGCSpec) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ArtifactGCSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *ArtifactGCSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArtifactGCSpec.Merge(m, src) +} +func (m *ArtifactGCSpec) XXX_Size() int { + return m.Size() +} +func (m *ArtifactGCSpec) XXX_DiscardUnknown() { + xxx_messageInfo_ArtifactGCSpec.DiscardUnknown(m) +} + +var xxx_messageInfo_ArtifactGCSpec proto.InternalMessageInfo + +func (m *ArtifactGCStatus) Reset() { *m = ArtifactGCStatus{} } +func (*ArtifactGCStatus) ProtoMessage() {} +func (*ArtifactGCStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{7} +} +func (m *ArtifactGCStatus) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ArtifactGCStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func 
(m *ArtifactGCStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArtifactGCStatus.Merge(m, src) +} +func (m *ArtifactGCStatus) XXX_Size() int { + return m.Size() +} +func (m *ArtifactGCStatus) XXX_DiscardUnknown() { + xxx_messageInfo_ArtifactGCStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_ArtifactGCStatus proto.InternalMessageInfo + func (m *ArtifactLocation) Reset() { *m = ArtifactLocation{} } func (*ArtifactLocation) ProtoMessage() {} func (*ArtifactLocation) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{4} + return fileDescriptor_724696e352c3df5f, []int{8} } func (m *ArtifactLocation) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -176,10 +288,38 @@ func (m *ArtifactLocation) XXX_DiscardUnknown() { var xxx_messageInfo_ArtifactLocation proto.InternalMessageInfo +func (m *ArtifactNodeSpec) Reset() { *m = ArtifactNodeSpec{} } +func (*ArtifactNodeSpec) ProtoMessage() {} +func (*ArtifactNodeSpec) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{9} +} +func (m *ArtifactNodeSpec) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ArtifactNodeSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *ArtifactNodeSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArtifactNodeSpec.Merge(m, src) +} +func (m *ArtifactNodeSpec) XXX_Size() int { + return m.Size() +} +func (m *ArtifactNodeSpec) XXX_DiscardUnknown() { + xxx_messageInfo_ArtifactNodeSpec.DiscardUnknown(m) +} + +var xxx_messageInfo_ArtifactNodeSpec proto.InternalMessageInfo + func (m *ArtifactPaths) Reset() { *m = ArtifactPaths{} } func (*ArtifactPaths) ProtoMessage() {} func (*ArtifactPaths) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{5} + return fileDescriptor_724696e352c3df5f, []int{10} } func (m *ArtifactPaths) XXX_Unmarshal(b []byte) 
error { return m.Unmarshal(b) @@ -207,7 +347,7 @@ var xxx_messageInfo_ArtifactPaths proto.InternalMessageInfo func (m *ArtifactRepository) Reset() { *m = ArtifactRepository{} } func (*ArtifactRepository) ProtoMessage() {} func (*ArtifactRepository) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{6} + return fileDescriptor_724696e352c3df5f, []int{11} } func (m *ArtifactRepository) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -235,7 +375,7 @@ var xxx_messageInfo_ArtifactRepository proto.InternalMessageInfo func (m *ArtifactRepositoryRef) Reset() { *m = ArtifactRepositoryRef{} } func (*ArtifactRepositoryRef) ProtoMessage() {} func (*ArtifactRepositoryRef) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{7} + return fileDescriptor_724696e352c3df5f, []int{12} } func (m *ArtifactRepositoryRef) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -263,7 +403,7 @@ var xxx_messageInfo_ArtifactRepositoryRef proto.InternalMessageInfo func (m *ArtifactRepositoryRefStatus) Reset() { *m = ArtifactRepositoryRefStatus{} } func (*ArtifactRepositoryRefStatus) ProtoMessage() {} func (*ArtifactRepositoryRefStatus) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{8} + return fileDescriptor_724696e352c3df5f, []int{13} } func (m *ArtifactRepositoryRefStatus) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -288,10 +428,122 @@ func (m *ArtifactRepositoryRefStatus) XXX_DiscardUnknown() { var xxx_messageInfo_ArtifactRepositoryRefStatus proto.InternalMessageInfo +func (m *ArtifactResult) Reset() { *m = ArtifactResult{} } +func (*ArtifactResult) ProtoMessage() {} +func (*ArtifactResult) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{14} +} +func (m *ArtifactResult) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ArtifactResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := 
m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *ArtifactResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArtifactResult.Merge(m, src) +} +func (m *ArtifactResult) XXX_Size() int { + return m.Size() +} +func (m *ArtifactResult) XXX_DiscardUnknown() { + xxx_messageInfo_ArtifactResult.DiscardUnknown(m) +} + +var xxx_messageInfo_ArtifactResult proto.InternalMessageInfo + +func (m *ArtifactResultNodeStatus) Reset() { *m = ArtifactResultNodeStatus{} } +func (*ArtifactResultNodeStatus) ProtoMessage() {} +func (*ArtifactResultNodeStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{15} +} +func (m *ArtifactResultNodeStatus) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ArtifactResultNodeStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *ArtifactResultNodeStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArtifactResultNodeStatus.Merge(m, src) +} +func (m *ArtifactResultNodeStatus) XXX_Size() int { + return m.Size() +} +func (m *ArtifactResultNodeStatus) XXX_DiscardUnknown() { + xxx_messageInfo_ArtifactResultNodeStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_ArtifactResultNodeStatus proto.InternalMessageInfo + +func (m *ArtifactSearchQuery) Reset() { *m = ArtifactSearchQuery{} } +func (*ArtifactSearchQuery) ProtoMessage() {} +func (*ArtifactSearchQuery) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{16} +} +func (m *ArtifactSearchQuery) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ArtifactSearchQuery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *ArtifactSearchQuery) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_ArtifactSearchQuery.Merge(m, src) +} +func (m *ArtifactSearchQuery) XXX_Size() int { + return m.Size() +} +func (m *ArtifactSearchQuery) XXX_DiscardUnknown() { + xxx_messageInfo_ArtifactSearchQuery.DiscardUnknown(m) +} + +var xxx_messageInfo_ArtifactSearchQuery proto.InternalMessageInfo + +func (m *ArtifactSearchResult) Reset() { *m = ArtifactSearchResult{} } +func (*ArtifactSearchResult) ProtoMessage() {} +func (*ArtifactSearchResult) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{17} +} +func (m *ArtifactSearchResult) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ArtifactSearchResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *ArtifactSearchResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArtifactSearchResult.Merge(m, src) +} +func (m *ArtifactSearchResult) XXX_Size() int { + return m.Size() +} +func (m *ArtifactSearchResult) XXX_DiscardUnknown() { + xxx_messageInfo_ArtifactSearchResult.DiscardUnknown(m) +} + +var xxx_messageInfo_ArtifactSearchResult proto.InternalMessageInfo + func (m *ArtifactoryArtifact) Reset() { *m = ArtifactoryArtifact{} } func (*ArtifactoryArtifact) ProtoMessage() {} func (*ArtifactoryArtifact) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{9} + return fileDescriptor_724696e352c3df5f, []int{18} } func (m *ArtifactoryArtifact) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -319,7 +571,7 @@ var xxx_messageInfo_ArtifactoryArtifact proto.InternalMessageInfo func (m *ArtifactoryArtifactRepository) Reset() { *m = ArtifactoryArtifactRepository{} } func (*ArtifactoryArtifactRepository) ProtoMessage() {} func (*ArtifactoryArtifactRepository) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{10} + return fileDescriptor_724696e352c3df5f, []int{19} } func (m 
*ArtifactoryArtifactRepository) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -347,7 +599,7 @@ var xxx_messageInfo_ArtifactoryArtifactRepository proto.InternalMessageInfo func (m *ArtifactoryAuth) Reset() { *m = ArtifactoryAuth{} } func (*ArtifactoryAuth) ProtoMessage() {} func (*ArtifactoryAuth) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{11} + return fileDescriptor_724696e352c3df5f, []int{20} } func (m *ArtifactoryAuth) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -372,10 +624,94 @@ func (m *ArtifactoryAuth) XXX_DiscardUnknown() { var xxx_messageInfo_ArtifactoryAuth proto.InternalMessageInfo +func (m *AzureArtifact) Reset() { *m = AzureArtifact{} } +func (*AzureArtifact) ProtoMessage() {} +func (*AzureArtifact) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{21} +} +func (m *AzureArtifact) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *AzureArtifact) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *AzureArtifact) XXX_Merge(src proto.Message) { + xxx_messageInfo_AzureArtifact.Merge(m, src) +} +func (m *AzureArtifact) XXX_Size() int { + return m.Size() +} +func (m *AzureArtifact) XXX_DiscardUnknown() { + xxx_messageInfo_AzureArtifact.DiscardUnknown(m) +} + +var xxx_messageInfo_AzureArtifact proto.InternalMessageInfo + +func (m *AzureArtifactRepository) Reset() { *m = AzureArtifactRepository{} } +func (*AzureArtifactRepository) ProtoMessage() {} +func (*AzureArtifactRepository) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{22} +} +func (m *AzureArtifactRepository) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *AzureArtifactRepository) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + 
return nil, err + } + return b[:n], nil +} +func (m *AzureArtifactRepository) XXX_Merge(src proto.Message) { + xxx_messageInfo_AzureArtifactRepository.Merge(m, src) +} +func (m *AzureArtifactRepository) XXX_Size() int { + return m.Size() +} +func (m *AzureArtifactRepository) XXX_DiscardUnknown() { + xxx_messageInfo_AzureArtifactRepository.DiscardUnknown(m) +} + +var xxx_messageInfo_AzureArtifactRepository proto.InternalMessageInfo + +func (m *AzureBlobContainer) Reset() { *m = AzureBlobContainer{} } +func (*AzureBlobContainer) ProtoMessage() {} +func (*AzureBlobContainer) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{23} +} +func (m *AzureBlobContainer) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *AzureBlobContainer) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *AzureBlobContainer) XXX_Merge(src proto.Message) { + xxx_messageInfo_AzureBlobContainer.Merge(m, src) +} +func (m *AzureBlobContainer) XXX_Size() int { + return m.Size() +} +func (m *AzureBlobContainer) XXX_DiscardUnknown() { + xxx_messageInfo_AzureBlobContainer.DiscardUnknown(m) +} + +var xxx_messageInfo_AzureBlobContainer proto.InternalMessageInfo + func (m *Backoff) Reset() { *m = Backoff{} } func (*Backoff) ProtoMessage() {} func (*Backoff) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{12} + return fileDescriptor_724696e352c3df5f, []int{24} } func (m *Backoff) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -400,10 +736,38 @@ func (m *Backoff) XXX_DiscardUnknown() { var xxx_messageInfo_Backoff proto.InternalMessageInfo +func (m *BasicAuth) Reset() { *m = BasicAuth{} } +func (*BasicAuth) ProtoMessage() {} +func (*BasicAuth) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{25} +} +func (m *BasicAuth) XXX_Unmarshal(b []byte) error { + 
return m.Unmarshal(b) +} +func (m *BasicAuth) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *BasicAuth) XXX_Merge(src proto.Message) { + xxx_messageInfo_BasicAuth.Merge(m, src) +} +func (m *BasicAuth) XXX_Size() int { + return m.Size() +} +func (m *BasicAuth) XXX_DiscardUnknown() { + xxx_messageInfo_BasicAuth.DiscardUnknown(m) +} + +var xxx_messageInfo_BasicAuth proto.InternalMessageInfo + func (m *Cache) Reset() { *m = Cache{} } func (*Cache) ProtoMessage() {} func (*Cache) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{13} + return fileDescriptor_724696e352c3df5f, []int{26} } func (m *Cache) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -428,10 +792,38 @@ func (m *Cache) XXX_DiscardUnknown() { var xxx_messageInfo_Cache proto.InternalMessageInfo +func (m *ClientCertAuth) Reset() { *m = ClientCertAuth{} } +func (*ClientCertAuth) ProtoMessage() {} +func (*ClientCertAuth) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{27} +} +func (m *ClientCertAuth) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ClientCertAuth) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *ClientCertAuth) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClientCertAuth.Merge(m, src) +} +func (m *ClientCertAuth) XXX_Size() int { + return m.Size() +} +func (m *ClientCertAuth) XXX_DiscardUnknown() { + xxx_messageInfo_ClientCertAuth.DiscardUnknown(m) +} + +var xxx_messageInfo_ClientCertAuth proto.InternalMessageInfo + func (m *ClusterWorkflowTemplate) Reset() { *m = ClusterWorkflowTemplate{} } func (*ClusterWorkflowTemplate) ProtoMessage() {} func (*ClusterWorkflowTemplate) Descriptor() ([]byte, []int) { - return 
fileDescriptor_724696e352c3df5f, []int{14} + return fileDescriptor_724696e352c3df5f, []int{28} } func (m *ClusterWorkflowTemplate) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -459,7 +851,7 @@ var xxx_messageInfo_ClusterWorkflowTemplate proto.InternalMessageInfo func (m *ClusterWorkflowTemplateList) Reset() { *m = ClusterWorkflowTemplateList{} } func (*ClusterWorkflowTemplateList) ProtoMessage() {} func (*ClusterWorkflowTemplateList) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{15} + return fileDescriptor_724696e352c3df5f, []int{29} } func (m *ClusterWorkflowTemplateList) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -487,7 +879,7 @@ var xxx_messageInfo_ClusterWorkflowTemplateList proto.InternalMessageInfo func (m *Condition) Reset() { *m = Condition{} } func (*Condition) ProtoMessage() {} func (*Condition) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{16} + return fileDescriptor_724696e352c3df5f, []int{30} } func (m *Condition) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -515,7 +907,7 @@ var xxx_messageInfo_Condition proto.InternalMessageInfo func (m *ContainerNode) Reset() { *m = ContainerNode{} } func (*ContainerNode) ProtoMessage() {} func (*ContainerNode) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{17} + return fileDescriptor_724696e352c3df5f, []int{31} } func (m *ContainerNode) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -543,7 +935,7 @@ var xxx_messageInfo_ContainerNode proto.InternalMessageInfo func (m *ContainerSetRetryStrategy) Reset() { *m = ContainerSetRetryStrategy{} } func (*ContainerSetRetryStrategy) ProtoMessage() {} func (*ContainerSetRetryStrategy) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{18} + return fileDescriptor_724696e352c3df5f, []int{32} } func (m *ContainerSetRetryStrategy) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -571,7 +963,7 @@ var 
xxx_messageInfo_ContainerSetRetryStrategy proto.InternalMessageInfo func (m *ContainerSetTemplate) Reset() { *m = ContainerSetTemplate{} } func (*ContainerSetTemplate) ProtoMessage() {} func (*ContainerSetTemplate) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{19} + return fileDescriptor_724696e352c3df5f, []int{33} } func (m *ContainerSetTemplate) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -599,7 +991,7 @@ var xxx_messageInfo_ContainerSetTemplate proto.InternalMessageInfo func (m *ContinueOn) Reset() { *m = ContinueOn{} } func (*ContinueOn) ProtoMessage() {} func (*ContinueOn) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{20} + return fileDescriptor_724696e352c3df5f, []int{34} } func (m *ContinueOn) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -627,7 +1019,7 @@ var xxx_messageInfo_ContinueOn proto.InternalMessageInfo func (m *Counter) Reset() { *m = Counter{} } func (*Counter) ProtoMessage() {} func (*Counter) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{21} + return fileDescriptor_724696e352c3df5f, []int{35} } func (m *Counter) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -655,7 +1047,7 @@ var xxx_messageInfo_Counter proto.InternalMessageInfo func (m *CreateS3BucketOptions) Reset() { *m = CreateS3BucketOptions{} } func (*CreateS3BucketOptions) ProtoMessage() {} func (*CreateS3BucketOptions) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{22} + return fileDescriptor_724696e352c3df5f, []int{36} } func (m *CreateS3BucketOptions) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -683,7 +1075,7 @@ var xxx_messageInfo_CreateS3BucketOptions proto.InternalMessageInfo func (m *CronWorkflow) Reset() { *m = CronWorkflow{} } func (*CronWorkflow) ProtoMessage() {} func (*CronWorkflow) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{23} + return 
fileDescriptor_724696e352c3df5f, []int{37} } func (m *CronWorkflow) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -711,7 +1103,7 @@ var xxx_messageInfo_CronWorkflow proto.InternalMessageInfo func (m *CronWorkflowList) Reset() { *m = CronWorkflowList{} } func (*CronWorkflowList) ProtoMessage() {} func (*CronWorkflowList) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{24} + return fileDescriptor_724696e352c3df5f, []int{38} } func (m *CronWorkflowList) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -739,7 +1131,7 @@ var xxx_messageInfo_CronWorkflowList proto.InternalMessageInfo func (m *CronWorkflowSpec) Reset() { *m = CronWorkflowSpec{} } func (*CronWorkflowSpec) ProtoMessage() {} func (*CronWorkflowSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{25} + return fileDescriptor_724696e352c3df5f, []int{39} } func (m *CronWorkflowSpec) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -767,7 +1159,7 @@ var xxx_messageInfo_CronWorkflowSpec proto.InternalMessageInfo func (m *CronWorkflowStatus) Reset() { *m = CronWorkflowStatus{} } func (*CronWorkflowStatus) ProtoMessage() {} func (*CronWorkflowStatus) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{26} + return fileDescriptor_724696e352c3df5f, []int{40} } func (m *CronWorkflowStatus) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -795,7 +1187,7 @@ var xxx_messageInfo_CronWorkflowStatus proto.InternalMessageInfo func (m *DAGTask) Reset() { *m = DAGTask{} } func (*DAGTask) ProtoMessage() {} func (*DAGTask) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{27} + return fileDescriptor_724696e352c3df5f, []int{41} } func (m *DAGTask) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -823,7 +1215,7 @@ var xxx_messageInfo_DAGTask proto.InternalMessageInfo func (m *DAGTemplate) Reset() { *m = DAGTemplate{} } func (*DAGTemplate) ProtoMessage() {} func 
(*DAGTemplate) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{28} + return fileDescriptor_724696e352c3df5f, []int{42} } func (m *DAGTemplate) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -851,7 +1243,7 @@ var xxx_messageInfo_DAGTemplate proto.InternalMessageInfo func (m *Data) Reset() { *m = Data{} } func (*Data) ProtoMessage() {} func (*Data) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{29} + return fileDescriptor_724696e352c3df5f, []int{43} } func (m *Data) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -879,7 +1271,7 @@ var xxx_messageInfo_Data proto.InternalMessageInfo func (m *DataSource) Reset() { *m = DataSource{} } func (*DataSource) ProtoMessage() {} func (*DataSource) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{30} + return fileDescriptor_724696e352c3df5f, []int{44} } func (m *DataSource) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -907,7 +1299,7 @@ var xxx_messageInfo_DataSource proto.InternalMessageInfo func (m *Event) Reset() { *m = Event{} } func (*Event) ProtoMessage() {} func (*Event) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{31} + return fileDescriptor_724696e352c3df5f, []int{45} } func (m *Event) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -935,7 +1327,7 @@ var xxx_messageInfo_Event proto.InternalMessageInfo func (m *ExecutorConfig) Reset() { *m = ExecutorConfig{} } func (*ExecutorConfig) ProtoMessage() {} func (*ExecutorConfig) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{32} + return fileDescriptor_724696e352c3df5f, []int{46} } func (m *ExecutorConfig) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -963,7 +1355,7 @@ var xxx_messageInfo_ExecutorConfig proto.InternalMessageInfo func (m *GCSArtifact) Reset() { *m = GCSArtifact{} } func (*GCSArtifact) ProtoMessage() {} func (*GCSArtifact) Descriptor() ([]byte, []int) { - 
return fileDescriptor_724696e352c3df5f, []int{33} + return fileDescriptor_724696e352c3df5f, []int{47} } func (m *GCSArtifact) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -991,7 +1383,7 @@ var xxx_messageInfo_GCSArtifact proto.InternalMessageInfo func (m *GCSArtifactRepository) Reset() { *m = GCSArtifactRepository{} } func (*GCSArtifactRepository) ProtoMessage() {} func (*GCSArtifactRepository) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{34} + return fileDescriptor_724696e352c3df5f, []int{48} } func (m *GCSArtifactRepository) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1019,7 +1411,7 @@ var xxx_messageInfo_GCSArtifactRepository proto.InternalMessageInfo func (m *GCSBucket) Reset() { *m = GCSBucket{} } func (*GCSBucket) ProtoMessage() {} func (*GCSBucket) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{35} + return fileDescriptor_724696e352c3df5f, []int{49} } func (m *GCSBucket) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1047,7 +1439,7 @@ var xxx_messageInfo_GCSBucket proto.InternalMessageInfo func (m *Gauge) Reset() { *m = Gauge{} } func (*Gauge) ProtoMessage() {} func (*Gauge) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{36} + return fileDescriptor_724696e352c3df5f, []int{50} } func (m *Gauge) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1075,7 +1467,7 @@ var xxx_messageInfo_Gauge proto.InternalMessageInfo func (m *GitArtifact) Reset() { *m = GitArtifact{} } func (*GitArtifact) ProtoMessage() {} func (*GitArtifact) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{37} + return fileDescriptor_724696e352c3df5f, []int{51} } func (m *GitArtifact) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1103,7 +1495,7 @@ var xxx_messageInfo_GitArtifact proto.InternalMessageInfo func (m *HDFSArtifact) Reset() { *m = HDFSArtifact{} } func (*HDFSArtifact) ProtoMessage() {} func 
(*HDFSArtifact) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{38} + return fileDescriptor_724696e352c3df5f, []int{52} } func (m *HDFSArtifact) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1131,7 +1523,7 @@ var xxx_messageInfo_HDFSArtifact proto.InternalMessageInfo func (m *HDFSArtifactRepository) Reset() { *m = HDFSArtifactRepository{} } func (*HDFSArtifactRepository) ProtoMessage() {} func (*HDFSArtifactRepository) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{39} + return fileDescriptor_724696e352c3df5f, []int{53} } func (m *HDFSArtifactRepository) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1159,7 +1551,7 @@ var xxx_messageInfo_HDFSArtifactRepository proto.InternalMessageInfo func (m *HDFSConfig) Reset() { *m = HDFSConfig{} } func (*HDFSConfig) ProtoMessage() {} func (*HDFSConfig) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{40} + return fileDescriptor_724696e352c3df5f, []int{54} } func (m *HDFSConfig) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1187,7 +1579,7 @@ var xxx_messageInfo_HDFSConfig proto.InternalMessageInfo func (m *HDFSKrbConfig) Reset() { *m = HDFSKrbConfig{} } func (*HDFSKrbConfig) ProtoMessage() {} func (*HDFSKrbConfig) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{41} + return fileDescriptor_724696e352c3df5f, []int{55} } func (m *HDFSKrbConfig) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1215,7 +1607,7 @@ var xxx_messageInfo_HDFSKrbConfig proto.InternalMessageInfo func (m *HTTP) Reset() { *m = HTTP{} } func (*HTTP) ProtoMessage() {} func (*HTTP) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{42} + return fileDescriptor_724696e352c3df5f, []int{56} } func (m *HTTP) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1243,7 +1635,7 @@ var xxx_messageInfo_HTTP proto.InternalMessageInfo func (m *HTTPArtifact) Reset() { *m = 
HTTPArtifact{} } func (*HTTPArtifact) ProtoMessage() {} func (*HTTPArtifact) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{43} + return fileDescriptor_724696e352c3df5f, []int{57} } func (m *HTTPArtifact) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1268,10 +1660,66 @@ func (m *HTTPArtifact) XXX_DiscardUnknown() { var xxx_messageInfo_HTTPArtifact proto.InternalMessageInfo +func (m *HTTPAuth) Reset() { *m = HTTPAuth{} } +func (*HTTPAuth) ProtoMessage() {} +func (*HTTPAuth) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{58} +} +func (m *HTTPAuth) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *HTTPAuth) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *HTTPAuth) XXX_Merge(src proto.Message) { + xxx_messageInfo_HTTPAuth.Merge(m, src) +} +func (m *HTTPAuth) XXX_Size() int { + return m.Size() +} +func (m *HTTPAuth) XXX_DiscardUnknown() { + xxx_messageInfo_HTTPAuth.DiscardUnknown(m) +} + +var xxx_messageInfo_HTTPAuth proto.InternalMessageInfo + +func (m *HTTPBodySource) Reset() { *m = HTTPBodySource{} } +func (*HTTPBodySource) ProtoMessage() {} +func (*HTTPBodySource) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{59} +} +func (m *HTTPBodySource) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *HTTPBodySource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *HTTPBodySource) XXX_Merge(src proto.Message) { + xxx_messageInfo_HTTPBodySource.Merge(m, src) +} +func (m *HTTPBodySource) XXX_Size() int { + return m.Size() +} +func (m *HTTPBodySource) XXX_DiscardUnknown() { + xxx_messageInfo_HTTPBodySource.DiscardUnknown(m) +} + +var 
xxx_messageInfo_HTTPBodySource proto.InternalMessageInfo + func (m *HTTPHeader) Reset() { *m = HTTPHeader{} } func (*HTTPHeader) ProtoMessage() {} func (*HTTPHeader) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{44} + return fileDescriptor_724696e352c3df5f, []int{60} } func (m *HTTPHeader) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1299,7 +1747,7 @@ var xxx_messageInfo_HTTPHeader proto.InternalMessageInfo func (m *HTTPHeaderSource) Reset() { *m = HTTPHeaderSource{} } func (*HTTPHeaderSource) ProtoMessage() {} func (*HTTPHeaderSource) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{45} + return fileDescriptor_724696e352c3df5f, []int{61} } func (m *HTTPHeaderSource) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1327,7 +1775,7 @@ var xxx_messageInfo_HTTPHeaderSource proto.InternalMessageInfo func (m *Header) Reset() { *m = Header{} } func (*Header) ProtoMessage() {} func (*Header) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{46} + return fileDescriptor_724696e352c3df5f, []int{62} } func (m *Header) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1355,7 +1803,7 @@ var xxx_messageInfo_Header proto.InternalMessageInfo func (m *Histogram) Reset() { *m = Histogram{} } func (*Histogram) ProtoMessage() {} func (*Histogram) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{47} + return fileDescriptor_724696e352c3df5f, []int{63} } func (m *Histogram) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1383,7 +1831,7 @@ var xxx_messageInfo_Histogram proto.InternalMessageInfo func (m *Inputs) Reset() { *m = Inputs{} } func (*Inputs) ProtoMessage() {} func (*Inputs) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{48} + return fileDescriptor_724696e352c3df5f, []int{64} } func (m *Inputs) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1411,7 +1859,7 @@ var 
xxx_messageInfo_Inputs proto.InternalMessageInfo func (m *Item) Reset() { *m = Item{} } func (*Item) ProtoMessage() {} func (*Item) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{49} + return fileDescriptor_724696e352c3df5f, []int{65} } func (m *Item) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1439,7 +1887,7 @@ var xxx_messageInfo_Item proto.InternalMessageInfo func (m *LabelKeys) Reset() { *m = LabelKeys{} } func (*LabelKeys) ProtoMessage() {} func (*LabelKeys) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{50} + return fileDescriptor_724696e352c3df5f, []int{66} } func (m *LabelKeys) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1467,7 +1915,7 @@ var xxx_messageInfo_LabelKeys proto.InternalMessageInfo func (m *LabelValueFrom) Reset() { *m = LabelValueFrom{} } func (*LabelValueFrom) ProtoMessage() {} func (*LabelValueFrom) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{51} + return fileDescriptor_724696e352c3df5f, []int{67} } func (m *LabelValueFrom) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1495,7 +1943,7 @@ var xxx_messageInfo_LabelValueFrom proto.InternalMessageInfo func (m *LabelValues) Reset() { *m = LabelValues{} } func (*LabelValues) ProtoMessage() {} func (*LabelValues) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{52} + return fileDescriptor_724696e352c3df5f, []int{68} } func (m *LabelValues) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1523,7 +1971,7 @@ var xxx_messageInfo_LabelValues proto.InternalMessageInfo func (m *LifecycleHook) Reset() { *m = LifecycleHook{} } func (*LifecycleHook) ProtoMessage() {} func (*LifecycleHook) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{53} + return fileDescriptor_724696e352c3df5f, []int{69} } func (m *LifecycleHook) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1551,7 +1999,7 @@ var 
xxx_messageInfo_LifecycleHook proto.InternalMessageInfo func (m *Link) Reset() { *m = Link{} } func (*Link) ProtoMessage() {} func (*Link) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{54} + return fileDescriptor_724696e352c3df5f, []int{70} } func (m *Link) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1576,10 +2024,38 @@ func (m *Link) XXX_DiscardUnknown() { var xxx_messageInfo_Link proto.InternalMessageInfo +func (m *ManifestFrom) Reset() { *m = ManifestFrom{} } +func (*ManifestFrom) ProtoMessage() {} +func (*ManifestFrom) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{71} +} +func (m *ManifestFrom) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ManifestFrom) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *ManifestFrom) XXX_Merge(src proto.Message) { + xxx_messageInfo_ManifestFrom.Merge(m, src) +} +func (m *ManifestFrom) XXX_Size() int { + return m.Size() +} +func (m *ManifestFrom) XXX_DiscardUnknown() { + xxx_messageInfo_ManifestFrom.DiscardUnknown(m) +} + +var xxx_messageInfo_ManifestFrom proto.InternalMessageInfo + func (m *MemoizationStatus) Reset() { *m = MemoizationStatus{} } func (*MemoizationStatus) ProtoMessage() {} func (*MemoizationStatus) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{55} + return fileDescriptor_724696e352c3df5f, []int{72} } func (m *MemoizationStatus) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1607,7 +2083,7 @@ var xxx_messageInfo_MemoizationStatus proto.InternalMessageInfo func (m *Memoize) Reset() { *m = Memoize{} } func (*Memoize) ProtoMessage() {} func (*Memoize) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{56} + return fileDescriptor_724696e352c3df5f, []int{73} } func (m *Memoize) XXX_Unmarshal(b []byte) error { 
return m.Unmarshal(b) @@ -1635,7 +2111,7 @@ var xxx_messageInfo_Memoize proto.InternalMessageInfo func (m *Metadata) Reset() { *m = Metadata{} } func (*Metadata) ProtoMessage() {} func (*Metadata) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{57} + return fileDescriptor_724696e352c3df5f, []int{74} } func (m *Metadata) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1663,7 +2139,7 @@ var xxx_messageInfo_Metadata proto.InternalMessageInfo func (m *MetricLabel) Reset() { *m = MetricLabel{} } func (*MetricLabel) ProtoMessage() {} func (*MetricLabel) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{58} + return fileDescriptor_724696e352c3df5f, []int{75} } func (m *MetricLabel) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1691,7 +2167,7 @@ var xxx_messageInfo_MetricLabel proto.InternalMessageInfo func (m *Metrics) Reset() { *m = Metrics{} } func (*Metrics) ProtoMessage() {} func (*Metrics) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{59} + return fileDescriptor_724696e352c3df5f, []int{76} } func (m *Metrics) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1719,7 +2195,7 @@ var xxx_messageInfo_Metrics proto.InternalMessageInfo func (m *Mutex) Reset() { *m = Mutex{} } func (*Mutex) ProtoMessage() {} func (*Mutex) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{60} + return fileDescriptor_724696e352c3df5f, []int{77} } func (m *Mutex) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1747,7 +2223,7 @@ var xxx_messageInfo_Mutex proto.InternalMessageInfo func (m *MutexHolding) Reset() { *m = MutexHolding{} } func (*MutexHolding) ProtoMessage() {} func (*MutexHolding) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{61} + return fileDescriptor_724696e352c3df5f, []int{78} } func (m *MutexHolding) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1775,7 +2251,7 @@ var 
xxx_messageInfo_MutexHolding proto.InternalMessageInfo func (m *MutexStatus) Reset() { *m = MutexStatus{} } func (*MutexStatus) ProtoMessage() {} func (*MutexStatus) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{62} + return fileDescriptor_724696e352c3df5f, []int{79} } func (m *MutexStatus) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1803,7 +2279,7 @@ var xxx_messageInfo_MutexStatus proto.InternalMessageInfo func (m *NodeResult) Reset() { *m = NodeResult{} } func (*NodeResult) ProtoMessage() {} func (*NodeResult) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{63} + return fileDescriptor_724696e352c3df5f, []int{80} } func (m *NodeResult) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1831,7 +2307,7 @@ var xxx_messageInfo_NodeResult proto.InternalMessageInfo func (m *NodeStatus) Reset() { *m = NodeStatus{} } func (*NodeStatus) ProtoMessage() {} func (*NodeStatus) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{64} + return fileDescriptor_724696e352c3df5f, []int{81} } func (m *NodeStatus) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1859,7 +2335,7 @@ var xxx_messageInfo_NodeStatus proto.InternalMessageInfo func (m *NodeSynchronizationStatus) Reset() { *m = NodeSynchronizationStatus{} } func (*NodeSynchronizationStatus) ProtoMessage() {} func (*NodeSynchronizationStatus) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{65} + return fileDescriptor_724696e352c3df5f, []int{82} } func (m *NodeSynchronizationStatus) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1887,7 +2363,7 @@ var xxx_messageInfo_NodeSynchronizationStatus proto.InternalMessageInfo func (m *NoneStrategy) Reset() { *m = NoneStrategy{} } func (*NoneStrategy) ProtoMessage() {} func (*NoneStrategy) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{66} + return fileDescriptor_724696e352c3df5f, []int{83} } func 
(m *NoneStrategy) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1912,10 +2388,66 @@ func (m *NoneStrategy) XXX_DiscardUnknown() { var xxx_messageInfo_NoneStrategy proto.InternalMessageInfo +func (m *OAuth2Auth) Reset() { *m = OAuth2Auth{} } +func (*OAuth2Auth) ProtoMessage() {} +func (*OAuth2Auth) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{84} +} +func (m *OAuth2Auth) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *OAuth2Auth) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *OAuth2Auth) XXX_Merge(src proto.Message) { + xxx_messageInfo_OAuth2Auth.Merge(m, src) +} +func (m *OAuth2Auth) XXX_Size() int { + return m.Size() +} +func (m *OAuth2Auth) XXX_DiscardUnknown() { + xxx_messageInfo_OAuth2Auth.DiscardUnknown(m) +} + +var xxx_messageInfo_OAuth2Auth proto.InternalMessageInfo + +func (m *OAuth2EndpointParam) Reset() { *m = OAuth2EndpointParam{} } +func (*OAuth2EndpointParam) ProtoMessage() {} +func (*OAuth2EndpointParam) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{85} +} +func (m *OAuth2EndpointParam) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *OAuth2EndpointParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *OAuth2EndpointParam) XXX_Merge(src proto.Message) { + xxx_messageInfo_OAuth2EndpointParam.Merge(m, src) +} +func (m *OAuth2EndpointParam) XXX_Size() int { + return m.Size() +} +func (m *OAuth2EndpointParam) XXX_DiscardUnknown() { + xxx_messageInfo_OAuth2EndpointParam.DiscardUnknown(m) +} + +var xxx_messageInfo_OAuth2EndpointParam proto.InternalMessageInfo + func (m *OSSArtifact) Reset() { *m = OSSArtifact{} } func (*OSSArtifact) ProtoMessage() {} func 
(*OSSArtifact) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{67} + return fileDescriptor_724696e352c3df5f, []int{86} } func (m *OSSArtifact) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1943,7 +2475,7 @@ var xxx_messageInfo_OSSArtifact proto.InternalMessageInfo func (m *OSSArtifactRepository) Reset() { *m = OSSArtifactRepository{} } func (*OSSArtifactRepository) ProtoMessage() {} func (*OSSArtifactRepository) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{68} + return fileDescriptor_724696e352c3df5f, []int{87} } func (m *OSSArtifactRepository) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1971,7 +2503,7 @@ var xxx_messageInfo_OSSArtifactRepository proto.InternalMessageInfo func (m *OSSBucket) Reset() { *m = OSSBucket{} } func (*OSSBucket) ProtoMessage() {} func (*OSSBucket) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{69} + return fileDescriptor_724696e352c3df5f, []int{88} } func (m *OSSBucket) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1999,7 +2531,7 @@ var xxx_messageInfo_OSSBucket proto.InternalMessageInfo func (m *OSSLifecycleRule) Reset() { *m = OSSLifecycleRule{} } func (*OSSLifecycleRule) ProtoMessage() {} func (*OSSLifecycleRule) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{70} + return fileDescriptor_724696e352c3df5f, []int{89} } func (m *OSSLifecycleRule) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2027,7 +2559,7 @@ var xxx_messageInfo_OSSLifecycleRule proto.InternalMessageInfo func (m *Object) Reset() { *m = Object{} } func (*Object) ProtoMessage() {} func (*Object) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{71} + return fileDescriptor_724696e352c3df5f, []int{90} } func (m *Object) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2055,7 +2587,7 @@ var xxx_messageInfo_Object proto.InternalMessageInfo func (m *Outputs) Reset() { 
*m = Outputs{} } func (*Outputs) ProtoMessage() {} func (*Outputs) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{72} + return fileDescriptor_724696e352c3df5f, []int{91} } func (m *Outputs) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2083,7 +2615,7 @@ var xxx_messageInfo_Outputs proto.InternalMessageInfo func (m *ParallelSteps) Reset() { *m = ParallelSteps{} } func (*ParallelSteps) ProtoMessage() {} func (*ParallelSteps) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{73} + return fileDescriptor_724696e352c3df5f, []int{92} } func (m *ParallelSteps) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2111,7 +2643,7 @@ var xxx_messageInfo_ParallelSteps proto.InternalMessageInfo func (m *Parameter) Reset() { *m = Parameter{} } func (*Parameter) ProtoMessage() {} func (*Parameter) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{74} + return fileDescriptor_724696e352c3df5f, []int{93} } func (m *Parameter) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2139,7 +2671,7 @@ var xxx_messageInfo_Parameter proto.InternalMessageInfo func (m *Plugin) Reset() { *m = Plugin{} } func (*Plugin) ProtoMessage() {} func (*Plugin) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{75} + return fileDescriptor_724696e352c3df5f, []int{94} } func (m *Plugin) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2167,7 +2699,7 @@ var xxx_messageInfo_Plugin proto.InternalMessageInfo func (m *PodGC) Reset() { *m = PodGC{} } func (*PodGC) ProtoMessage() {} func (*PodGC) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{76} + return fileDescriptor_724696e352c3df5f, []int{95} } func (m *PodGC) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2195,7 +2727,7 @@ var xxx_messageInfo_PodGC proto.InternalMessageInfo func (m *Prometheus) Reset() { *m = Prometheus{} } func (*Prometheus) ProtoMessage() {} func 
(*Prometheus) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{77} + return fileDescriptor_724696e352c3df5f, []int{96} } func (m *Prometheus) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2223,7 +2755,7 @@ var xxx_messageInfo_Prometheus proto.InternalMessageInfo func (m *RawArtifact) Reset() { *m = RawArtifact{} } func (*RawArtifact) ProtoMessage() {} func (*RawArtifact) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{78} + return fileDescriptor_724696e352c3df5f, []int{97} } func (m *RawArtifact) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2251,7 +2783,7 @@ var xxx_messageInfo_RawArtifact proto.InternalMessageInfo func (m *ResourceTemplate) Reset() { *m = ResourceTemplate{} } func (*ResourceTemplate) ProtoMessage() {} func (*ResourceTemplate) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{79} + return fileDescriptor_724696e352c3df5f, []int{98} } func (m *ResourceTemplate) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2279,7 +2811,7 @@ var xxx_messageInfo_ResourceTemplate proto.InternalMessageInfo func (m *RetryAffinity) Reset() { *m = RetryAffinity{} } func (*RetryAffinity) ProtoMessage() {} func (*RetryAffinity) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{80} + return fileDescriptor_724696e352c3df5f, []int{99} } func (m *RetryAffinity) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2307,7 +2839,7 @@ var xxx_messageInfo_RetryAffinity proto.InternalMessageInfo func (m *RetryNodeAntiAffinity) Reset() { *m = RetryNodeAntiAffinity{} } func (*RetryNodeAntiAffinity) ProtoMessage() {} func (*RetryNodeAntiAffinity) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{81} + return fileDescriptor_724696e352c3df5f, []int{100} } func (m *RetryNodeAntiAffinity) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2335,7 +2867,7 @@ var xxx_messageInfo_RetryNodeAntiAffinity 
proto.InternalMessageInfo func (m *RetryStrategy) Reset() { *m = RetryStrategy{} } func (*RetryStrategy) ProtoMessage() {} func (*RetryStrategy) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{82} + return fileDescriptor_724696e352c3df5f, []int{101} } func (m *RetryStrategy) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2363,7 +2895,7 @@ var xxx_messageInfo_RetryStrategy proto.InternalMessageInfo func (m *S3Artifact) Reset() { *m = S3Artifact{} } func (*S3Artifact) ProtoMessage() {} func (*S3Artifact) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{83} + return fileDescriptor_724696e352c3df5f, []int{102} } func (m *S3Artifact) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2391,7 +2923,7 @@ var xxx_messageInfo_S3Artifact proto.InternalMessageInfo func (m *S3ArtifactRepository) Reset() { *m = S3ArtifactRepository{} } func (*S3ArtifactRepository) ProtoMessage() {} func (*S3ArtifactRepository) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{84} + return fileDescriptor_724696e352c3df5f, []int{103} } func (m *S3ArtifactRepository) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2419,7 +2951,7 @@ var xxx_messageInfo_S3ArtifactRepository proto.InternalMessageInfo func (m *S3Bucket) Reset() { *m = S3Bucket{} } func (*S3Bucket) ProtoMessage() {} func (*S3Bucket) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{85} + return fileDescriptor_724696e352c3df5f, []int{104} } func (m *S3Bucket) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2447,7 +2979,7 @@ var xxx_messageInfo_S3Bucket proto.InternalMessageInfo func (m *S3EncryptionOptions) Reset() { *m = S3EncryptionOptions{} } func (*S3EncryptionOptions) ProtoMessage() {} func (*S3EncryptionOptions) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{86} + return fileDescriptor_724696e352c3df5f, []int{105} } func (m *S3EncryptionOptions) 
XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2475,7 +3007,7 @@ var xxx_messageInfo_S3EncryptionOptions proto.InternalMessageInfo func (m *ScriptTemplate) Reset() { *m = ScriptTemplate{} } func (*ScriptTemplate) ProtoMessage() {} func (*ScriptTemplate) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{87} + return fileDescriptor_724696e352c3df5f, []int{106} } func (m *ScriptTemplate) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2503,7 +3035,7 @@ var xxx_messageInfo_ScriptTemplate proto.InternalMessageInfo func (m *SemaphoreHolding) Reset() { *m = SemaphoreHolding{} } func (*SemaphoreHolding) ProtoMessage() {} func (*SemaphoreHolding) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{88} + return fileDescriptor_724696e352c3df5f, []int{107} } func (m *SemaphoreHolding) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2531,7 +3063,7 @@ var xxx_messageInfo_SemaphoreHolding proto.InternalMessageInfo func (m *SemaphoreRef) Reset() { *m = SemaphoreRef{} } func (*SemaphoreRef) ProtoMessage() {} func (*SemaphoreRef) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{89} + return fileDescriptor_724696e352c3df5f, []int{108} } func (m *SemaphoreRef) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2559,7 +3091,7 @@ var xxx_messageInfo_SemaphoreRef proto.InternalMessageInfo func (m *SemaphoreStatus) Reset() { *m = SemaphoreStatus{} } func (*SemaphoreStatus) ProtoMessage() {} func (*SemaphoreStatus) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{90} + return fileDescriptor_724696e352c3df5f, []int{109} } func (m *SemaphoreStatus) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2587,7 +3119,7 @@ var xxx_messageInfo_SemaphoreStatus proto.InternalMessageInfo func (m *Sequence) Reset() { *m = Sequence{} } func (*Sequence) ProtoMessage() {} func (*Sequence) Descriptor() ([]byte, []int) { - return 
fileDescriptor_724696e352c3df5f, []int{91} + return fileDescriptor_724696e352c3df5f, []int{110} } func (m *Sequence) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2615,7 +3147,7 @@ var xxx_messageInfo_Sequence proto.InternalMessageInfo func (m *Submit) Reset() { *m = Submit{} } func (*Submit) ProtoMessage() {} func (*Submit) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{92} + return fileDescriptor_724696e352c3df5f, []int{111} } func (m *Submit) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2643,7 +3175,7 @@ var xxx_messageInfo_Submit proto.InternalMessageInfo func (m *SubmitOpts) Reset() { *m = SubmitOpts{} } func (*SubmitOpts) ProtoMessage() {} func (*SubmitOpts) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{93} + return fileDescriptor_724696e352c3df5f, []int{112} } func (m *SubmitOpts) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2671,7 +3203,7 @@ var xxx_messageInfo_SubmitOpts proto.InternalMessageInfo func (m *SuppliedValueFrom) Reset() { *m = SuppliedValueFrom{} } func (*SuppliedValueFrom) ProtoMessage() {} func (*SuppliedValueFrom) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{94} + return fileDescriptor_724696e352c3df5f, []int{113} } func (m *SuppliedValueFrom) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2699,7 +3231,7 @@ var xxx_messageInfo_SuppliedValueFrom proto.InternalMessageInfo func (m *SuspendTemplate) Reset() { *m = SuspendTemplate{} } func (*SuspendTemplate) ProtoMessage() {} func (*SuspendTemplate) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{95} + return fileDescriptor_724696e352c3df5f, []int{114} } func (m *SuspendTemplate) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2727,7 +3259,7 @@ var xxx_messageInfo_SuspendTemplate proto.InternalMessageInfo func (m *Synchronization) Reset() { *m = Synchronization{} } func (*Synchronization) ProtoMessage() {} func 
(*Synchronization) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{96} + return fileDescriptor_724696e352c3df5f, []int{115} } func (m *Synchronization) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2755,7 +3287,7 @@ var xxx_messageInfo_Synchronization proto.InternalMessageInfo func (m *SynchronizationStatus) Reset() { *m = SynchronizationStatus{} } func (*SynchronizationStatus) ProtoMessage() {} func (*SynchronizationStatus) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{97} + return fileDescriptor_724696e352c3df5f, []int{116} } func (m *SynchronizationStatus) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2783,7 +3315,7 @@ var xxx_messageInfo_SynchronizationStatus proto.InternalMessageInfo func (m *TTLStrategy) Reset() { *m = TTLStrategy{} } func (*TTLStrategy) ProtoMessage() {} func (*TTLStrategy) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{98} + return fileDescriptor_724696e352c3df5f, []int{117} } func (m *TTLStrategy) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2811,7 +3343,7 @@ var xxx_messageInfo_TTLStrategy proto.InternalMessageInfo func (m *TarStrategy) Reset() { *m = TarStrategy{} } func (*TarStrategy) ProtoMessage() {} func (*TarStrategy) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{99} + return fileDescriptor_724696e352c3df5f, []int{118} } func (m *TarStrategy) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2839,7 +3371,7 @@ var xxx_messageInfo_TarStrategy proto.InternalMessageInfo func (m *Template) Reset() { *m = Template{} } func (*Template) ProtoMessage() {} func (*Template) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{100} + return fileDescriptor_724696e352c3df5f, []int{119} } func (m *Template) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2867,7 +3399,7 @@ var xxx_messageInfo_Template proto.InternalMessageInfo func (m 
*TemplateRef) Reset() { *m = TemplateRef{} } func (*TemplateRef) ProtoMessage() {} func (*TemplateRef) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{101} + return fileDescriptor_724696e352c3df5f, []int{120} } func (m *TemplateRef) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2895,7 +3427,7 @@ var xxx_messageInfo_TemplateRef proto.InternalMessageInfo func (m *TransformationStep) Reset() { *m = TransformationStep{} } func (*TransformationStep) ProtoMessage() {} func (*TransformationStep) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{102} + return fileDescriptor_724696e352c3df5f, []int{121} } func (m *TransformationStep) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2923,7 +3455,7 @@ var xxx_messageInfo_TransformationStep proto.InternalMessageInfo func (m *UserContainer) Reset() { *m = UserContainer{} } func (*UserContainer) ProtoMessage() {} func (*UserContainer) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{103} + return fileDescriptor_724696e352c3df5f, []int{122} } func (m *UserContainer) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2951,7 +3483,7 @@ var xxx_messageInfo_UserContainer proto.InternalMessageInfo func (m *ValueFrom) Reset() { *m = ValueFrom{} } func (*ValueFrom) ProtoMessage() {} func (*ValueFrom) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{104} + return fileDescriptor_724696e352c3df5f, []int{123} } func (m *ValueFrom) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2979,7 +3511,7 @@ var xxx_messageInfo_ValueFrom proto.InternalMessageInfo func (m *Version) Reset() { *m = Version{} } func (*Version) ProtoMessage() {} func (*Version) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{105} + return fileDescriptor_724696e352c3df5f, []int{124} } func (m *Version) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3007,7 +3539,7 @@ var 
xxx_messageInfo_Version proto.InternalMessageInfo func (m *VolumeClaimGC) Reset() { *m = VolumeClaimGC{} } func (*VolumeClaimGC) ProtoMessage() {} func (*VolumeClaimGC) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{106} + return fileDescriptor_724696e352c3df5f, []int{125} } func (m *VolumeClaimGC) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3035,7 +3567,7 @@ var xxx_messageInfo_VolumeClaimGC proto.InternalMessageInfo func (m *Workflow) Reset() { *m = Workflow{} } func (*Workflow) ProtoMessage() {} func (*Workflow) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{107} + return fileDescriptor_724696e352c3df5f, []int{126} } func (m *Workflow) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3060,10 +3592,66 @@ func (m *Workflow) XXX_DiscardUnknown() { var xxx_messageInfo_Workflow proto.InternalMessageInfo +func (m *WorkflowArtifactGCTask) Reset() { *m = WorkflowArtifactGCTask{} } +func (*WorkflowArtifactGCTask) ProtoMessage() {} +func (*WorkflowArtifactGCTask) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{127} +} +func (m *WorkflowArtifactGCTask) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *WorkflowArtifactGCTask) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *WorkflowArtifactGCTask) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowArtifactGCTask.Merge(m, src) +} +func (m *WorkflowArtifactGCTask) XXX_Size() int { + return m.Size() +} +func (m *WorkflowArtifactGCTask) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowArtifactGCTask.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowArtifactGCTask proto.InternalMessageInfo + +func (m *WorkflowArtifactGCTaskList) Reset() { *m = WorkflowArtifactGCTaskList{} } +func (*WorkflowArtifactGCTaskList) ProtoMessage() {} +func 
(*WorkflowArtifactGCTaskList) Descriptor() ([]byte, []int) { + return fileDescriptor_724696e352c3df5f, []int{128} +} +func (m *WorkflowArtifactGCTaskList) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *WorkflowArtifactGCTaskList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *WorkflowArtifactGCTaskList) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowArtifactGCTaskList.Merge(m, src) +} +func (m *WorkflowArtifactGCTaskList) XXX_Size() int { + return m.Size() +} +func (m *WorkflowArtifactGCTaskList) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowArtifactGCTaskList.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowArtifactGCTaskList proto.InternalMessageInfo + func (m *WorkflowEventBinding) Reset() { *m = WorkflowEventBinding{} } func (*WorkflowEventBinding) ProtoMessage() {} func (*WorkflowEventBinding) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{108} + return fileDescriptor_724696e352c3df5f, []int{129} } func (m *WorkflowEventBinding) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3091,7 +3679,7 @@ var xxx_messageInfo_WorkflowEventBinding proto.InternalMessageInfo func (m *WorkflowEventBindingList) Reset() { *m = WorkflowEventBindingList{} } func (*WorkflowEventBindingList) ProtoMessage() {} func (*WorkflowEventBindingList) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{109} + return fileDescriptor_724696e352c3df5f, []int{130} } func (m *WorkflowEventBindingList) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3119,7 +3707,7 @@ var xxx_messageInfo_WorkflowEventBindingList proto.InternalMessageInfo func (m *WorkflowEventBindingSpec) Reset() { *m = WorkflowEventBindingSpec{} } func (*WorkflowEventBindingSpec) ProtoMessage() {} func (*WorkflowEventBindingSpec) Descriptor() ([]byte, []int) { - return 
fileDescriptor_724696e352c3df5f, []int{110} + return fileDescriptor_724696e352c3df5f, []int{131} } func (m *WorkflowEventBindingSpec) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3147,7 +3735,7 @@ var xxx_messageInfo_WorkflowEventBindingSpec proto.InternalMessageInfo func (m *WorkflowList) Reset() { *m = WorkflowList{} } func (*WorkflowList) ProtoMessage() {} func (*WorkflowList) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{111} + return fileDescriptor_724696e352c3df5f, []int{132} } func (m *WorkflowList) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3175,7 +3763,7 @@ var xxx_messageInfo_WorkflowList proto.InternalMessageInfo func (m *WorkflowMetadata) Reset() { *m = WorkflowMetadata{} } func (*WorkflowMetadata) ProtoMessage() {} func (*WorkflowMetadata) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{112} + return fileDescriptor_724696e352c3df5f, []int{133} } func (m *WorkflowMetadata) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3203,7 +3791,7 @@ var xxx_messageInfo_WorkflowMetadata proto.InternalMessageInfo func (m *WorkflowSpec) Reset() { *m = WorkflowSpec{} } func (*WorkflowSpec) ProtoMessage() {} func (*WorkflowSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{113} + return fileDescriptor_724696e352c3df5f, []int{134} } func (m *WorkflowSpec) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3231,7 +3819,7 @@ var xxx_messageInfo_WorkflowSpec proto.InternalMessageInfo func (m *WorkflowStatus) Reset() { *m = WorkflowStatus{} } func (*WorkflowStatus) ProtoMessage() {} func (*WorkflowStatus) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{114} + return fileDescriptor_724696e352c3df5f, []int{135} } func (m *WorkflowStatus) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3259,7 +3847,7 @@ var xxx_messageInfo_WorkflowStatus proto.InternalMessageInfo func (m *WorkflowStep) Reset() { 
*m = WorkflowStep{} } func (*WorkflowStep) ProtoMessage() {} func (*WorkflowStep) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{115} + return fileDescriptor_724696e352c3df5f, []int{136} } func (m *WorkflowStep) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3287,7 +3875,7 @@ var xxx_messageInfo_WorkflowStep proto.InternalMessageInfo func (m *WorkflowTaskResult) Reset() { *m = WorkflowTaskResult{} } func (*WorkflowTaskResult) ProtoMessage() {} func (*WorkflowTaskResult) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{116} + return fileDescriptor_724696e352c3df5f, []int{137} } func (m *WorkflowTaskResult) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3315,7 +3903,7 @@ var xxx_messageInfo_WorkflowTaskResult proto.InternalMessageInfo func (m *WorkflowTaskResultList) Reset() { *m = WorkflowTaskResultList{} } func (*WorkflowTaskResultList) ProtoMessage() {} func (*WorkflowTaskResultList) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{117} + return fileDescriptor_724696e352c3df5f, []int{138} } func (m *WorkflowTaskResultList) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3343,7 +3931,7 @@ var xxx_messageInfo_WorkflowTaskResultList proto.InternalMessageInfo func (m *WorkflowTaskSet) Reset() { *m = WorkflowTaskSet{} } func (*WorkflowTaskSet) ProtoMessage() {} func (*WorkflowTaskSet) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{118} + return fileDescriptor_724696e352c3df5f, []int{139} } func (m *WorkflowTaskSet) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3371,7 +3959,7 @@ var xxx_messageInfo_WorkflowTaskSet proto.InternalMessageInfo func (m *WorkflowTaskSetList) Reset() { *m = WorkflowTaskSetList{} } func (*WorkflowTaskSetList) ProtoMessage() {} func (*WorkflowTaskSetList) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{119} + return fileDescriptor_724696e352c3df5f, 
[]int{140} } func (m *WorkflowTaskSetList) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3399,7 +3987,7 @@ var xxx_messageInfo_WorkflowTaskSetList proto.InternalMessageInfo func (m *WorkflowTaskSetSpec) Reset() { *m = WorkflowTaskSetSpec{} } func (*WorkflowTaskSetSpec) ProtoMessage() {} func (*WorkflowTaskSetSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{120} + return fileDescriptor_724696e352c3df5f, []int{141} } func (m *WorkflowTaskSetSpec) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3427,7 +4015,7 @@ var xxx_messageInfo_WorkflowTaskSetSpec proto.InternalMessageInfo func (m *WorkflowTaskSetStatus) Reset() { *m = WorkflowTaskSetStatus{} } func (*WorkflowTaskSetStatus) ProtoMessage() {} func (*WorkflowTaskSetStatus) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{121} + return fileDescriptor_724696e352c3df5f, []int{142} } func (m *WorkflowTaskSetStatus) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3455,7 +4043,7 @@ var xxx_messageInfo_WorkflowTaskSetStatus proto.InternalMessageInfo func (m *WorkflowTemplate) Reset() { *m = WorkflowTemplate{} } func (*WorkflowTemplate) ProtoMessage() {} func (*WorkflowTemplate) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{122} + return fileDescriptor_724696e352c3df5f, []int{143} } func (m *WorkflowTemplate) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3483,7 +4071,7 @@ var xxx_messageInfo_WorkflowTemplate proto.InternalMessageInfo func (m *WorkflowTemplateList) Reset() { *m = WorkflowTemplateList{} } func (*WorkflowTemplateList) ProtoMessage() {} func (*WorkflowTemplateList) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{123} + return fileDescriptor_724696e352c3df5f, []int{144} } func (m *WorkflowTemplateList) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3511,7 +4099,7 @@ var xxx_messageInfo_WorkflowTemplateList 
proto.InternalMessageInfo func (m *WorkflowTemplateRef) Reset() { *m = WorkflowTemplateRef{} } func (*WorkflowTemplateRef) ProtoMessage() {} func (*WorkflowTemplateRef) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{124} + return fileDescriptor_724696e352c3df5f, []int{145} } func (m *WorkflowTemplateRef) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3539,7 +4127,7 @@ var xxx_messageInfo_WorkflowTemplateRef proto.InternalMessageInfo func (m *ZipStrategy) Reset() { *m = ZipStrategy{} } func (*ZipStrategy) ProtoMessage() {} func (*ZipStrategy) Descriptor() ([]byte, []int) { - return fileDescriptor_724696e352c3df5f, []int{125} + return fileDescriptor_724696e352c3df5f, []int{146} } func (m *ZipStrategy) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -3568,17 +4156,39 @@ func init() { proto.RegisterType((*Amount)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Amount") proto.RegisterType((*ArchiveStrategy)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArchiveStrategy") proto.RegisterType((*Arguments)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Arguments") + proto.RegisterType((*ArtGCStatus)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtGCStatus") + proto.RegisterMapType((map[string]bool)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtGCStatus.PodsRecoupedEntry") + proto.RegisterMapType((map[ArtifactGCStrategy]bool)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtGCStatus.StrategiesProcessedEntry") proto.RegisterType((*Artifact)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Artifact") + proto.RegisterType((*ArtifactGC)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactGC") + proto.RegisterType((*ArtifactGCSpec)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactGCSpec") + 
proto.RegisterMapType((map[string]ArtifactNodeSpec)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactGCSpec.ArtifactsByNodeEntry") + proto.RegisterType((*ArtifactGCStatus)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactGCStatus") + proto.RegisterMapType((map[string]ArtifactResultNodeStatus)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactGCStatus.ArtifactResultsByNodeEntry") proto.RegisterType((*ArtifactLocation)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactLocation") + proto.RegisterType((*ArtifactNodeSpec)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactNodeSpec") + proto.RegisterMapType((map[string]Artifact)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactNodeSpec.ArtifactsEntry") proto.RegisterType((*ArtifactPaths)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactPaths") proto.RegisterType((*ArtifactRepository)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactRepository") proto.RegisterType((*ArtifactRepositoryRef)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactRepositoryRef") proto.RegisterType((*ArtifactRepositoryRefStatus)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactRepositoryRefStatus") + proto.RegisterType((*ArtifactResult)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactResult") + proto.RegisterType((*ArtifactResultNodeStatus)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactResultNodeStatus") + proto.RegisterMapType((map[string]ArtifactResult)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactResultNodeStatus.ArtifactResultsEntry") + proto.RegisterType((*ArtifactSearchQuery)(nil), 
"github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactSearchQuery") + proto.RegisterMapType((map[ArtifactGCStrategy]bool)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactSearchQuery.ArtifactGCStrategiesEntry") + proto.RegisterMapType((map[NodeType]bool)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactSearchQuery.NodeTypesEntry") + proto.RegisterType((*ArtifactSearchResult)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactSearchResult") proto.RegisterType((*ArtifactoryArtifact)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactoryArtifact") proto.RegisterType((*ArtifactoryArtifactRepository)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactoryArtifactRepository") proto.RegisterType((*ArtifactoryAuth)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ArtifactoryAuth") + proto.RegisterType((*AzureArtifact)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.AzureArtifact") + proto.RegisterType((*AzureArtifactRepository)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.AzureArtifactRepository") + proto.RegisterType((*AzureBlobContainer)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.AzureBlobContainer") proto.RegisterType((*Backoff)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Backoff") + proto.RegisterType((*BasicAuth)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.BasicAuth") proto.RegisterType((*Cache)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Cache") + proto.RegisterType((*ClientCertAuth)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClientCertAuth") proto.RegisterType((*ClusterWorkflowTemplate)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplate") 
proto.RegisterType((*ClusterWorkflowTemplateList)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ClusterWorkflowTemplateList") proto.RegisterType((*Condition)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Condition") @@ -3610,6 +4220,8 @@ func init() { proto.RegisterType((*HDFSKrbConfig)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.HDFSKrbConfig") proto.RegisterType((*HTTP)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.HTTP") proto.RegisterType((*HTTPArtifact)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.HTTPArtifact") + proto.RegisterType((*HTTPAuth)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.HTTPAuth") + proto.RegisterType((*HTTPBodySource)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.HTTPBodySource") proto.RegisterType((*HTTPHeader)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.HTTPHeader") proto.RegisterType((*HTTPHeaderSource)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.HTTPHeaderSource") proto.RegisterType((*Header)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Header") @@ -3621,6 +4233,7 @@ func init() { proto.RegisterType((*LabelValues)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.LabelValues") proto.RegisterType((*LifecycleHook)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.LifecycleHook") proto.RegisterType((*Link)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Link") + proto.RegisterType((*ManifestFrom)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.ManifestFrom") proto.RegisterType((*MemoizationStatus)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.MemoizationStatus") proto.RegisterType((*Memoize)(nil), 
"github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Memoize") proto.RegisterType((*Metadata)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Metadata") @@ -3636,6 +4249,8 @@ func init() { proto.RegisterMapType((ResourcesDuration)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.NodeStatus.ResourcesDurationEntry") proto.RegisterType((*NodeSynchronizationStatus)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.NodeSynchronizationStatus") proto.RegisterType((*NoneStrategy)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.NoneStrategy") + proto.RegisterType((*OAuth2Auth)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.OAuth2Auth") + proto.RegisterType((*OAuth2EndpointParam)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.OAuth2EndpointParam") proto.RegisterType((*OSSArtifact)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.OSSArtifact") proto.RegisterType((*OSSArtifactRepository)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.OSSArtifactRepository") proto.RegisterType((*OSSBucket)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.OSSBucket") @@ -3678,6 +4293,8 @@ func init() { proto.RegisterType((*Version)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Version") proto.RegisterType((*VolumeClaimGC)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.VolumeClaimGC") proto.RegisterType((*Workflow)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.Workflow") + proto.RegisterType((*WorkflowArtifactGCTask)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowArtifactGCTask") + proto.RegisterType((*WorkflowArtifactGCTaskList)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowArtifactGCTaskList") 
proto.RegisterType((*WorkflowEventBinding)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowEventBinding") proto.RegisterType((*WorkflowEventBindingList)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowEventBindingList") proto.RegisterType((*WorkflowEventBindingSpec)(nil), "github.com.argoproj.argo_workflows.v3.pkg.apis.workflow.v1alpha1.WorkflowEventBindingSpec") @@ -3714,589 +4331,671 @@ func init() { } var fileDescriptor_724696e352c3df5f = []byte{ - // 9301 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x7d, 0x5b, 0x70, 0x24, 0xd7, - 0x75, 0x18, 0x7b, 0x80, 0x01, 0x66, 0x0e, 0x80, 0x05, 0xf6, 0xee, 0x6b, 0x08, 0x92, 0x0b, 0xba, - 0x69, 0x32, 0x5c, 0x9b, 0x02, 0xcc, 0x5d, 0x31, 0x61, 0xa4, 0x8a, 0x2c, 0x0c, 0xb0, 0x78, 0x10, - 0x4f, 0xde, 0xc1, 0xee, 0x86, 0x14, 0x43, 0xab, 0x31, 0x73, 0x31, 0xd3, 0xc4, 0x4c, 0xf7, 0xb0, - 0xbb, 0x07, 0x58, 0xf0, 0x21, 0x29, 0xb2, 0x2d, 0x89, 0xb1, 0x62, 0xe5, 0x61, 0xcb, 0xb2, 0x92, - 0x54, 0xb9, 0x14, 0x2b, 0x56, 0x39, 0xae, 0xa4, 0x54, 0x95, 0xca, 0x87, 0xfd, 0x9b, 0x4a, 0x29, - 0x95, 0x54, 0xc5, 0x29, 0x2b, 0xb1, 0x3e, 0x12, 0x28, 0x42, 0x62, 0xfd, 0xa4, 0x54, 0x95, 0xb8, - 0x22, 0xc5, 0xd9, 0xe4, 0x23, 0x75, 0x9f, 0x7d, 0x6f, 0x4f, 0x0f, 0x76, 0xb0, 0xdb, 0xc0, 0xb2, - 0xe2, 0xbf, 0x99, 0x73, 0xcf, 0x3d, 0xe7, 0x3e, 0xcf, 0x3d, 0xf7, 0x9c, 0x73, 0x4f, 0xc3, 0x66, - 0xdd, 0x8d, 0x1a, 0x9d, 0xed, 0xe9, 0xaa, 0xdf, 0x9a, 0x71, 0x82, 0xba, 0xdf, 0x0e, 0xfc, 0xb7, - 0xd8, 0x8f, 0x8f, 0xec, 0xfb, 0xc1, 0xee, 0x4e, 0xd3, 0xdf, 0x0f, 0x67, 0xf6, 0x6e, 0xcc, 0xb4, - 0x77, 0xeb, 0x33, 0x4e, 0xdb, 0x0d, 0x67, 0x24, 0x74, 0x66, 0xef, 0x45, 0xa7, 0xd9, 0x6e, 0x38, - 0x2f, 0xce, 0xd4, 0x89, 0x47, 0x02, 0x27, 0x22, 0xb5, 0xe9, 0x76, 0xe0, 0x47, 0x3e, 0xfa, 0x64, - 0x4c, 0x71, 0x5a, 0x52, 0x64, 0x3f, 0x7e, 0x41, 0x51, 0x9c, 0xde, 0xbb, 0x31, 0xdd, 0xde, 0xad, - 0x4f, 0x53, 0x8a, 0xd3, 0x12, 0x3a, 0x2d, 0x29, 0x4e, 
0x7e, 0x44, 0x6b, 0x53, 0xdd, 0xaf, 0xfb, - 0x33, 0x8c, 0xf0, 0x76, 0x67, 0x87, 0xfd, 0x63, 0x7f, 0xd8, 0x2f, 0xce, 0x70, 0xd2, 0xde, 0x7d, - 0x39, 0x9c, 0x76, 0x7d, 0xda, 0xbe, 0x99, 0xaa, 0x1f, 0x90, 0x99, 0xbd, 0xae, 0x46, 0x4d, 0x5e, - 0xd3, 0x70, 0xda, 0x7e, 0xd3, 0xad, 0x1e, 0xcc, 0xec, 0xbd, 0xb8, 0x4d, 0xa2, 0xee, 0xf6, 0x4f, - 0x7e, 0x34, 0x46, 0x6d, 0x39, 0xd5, 0x86, 0xeb, 0x91, 0xe0, 0x20, 0xee, 0x7f, 0x8b, 0x44, 0x4e, - 0x1a, 0x83, 0x99, 0x5e, 0xb5, 0x82, 0x8e, 0x17, 0xb9, 0x2d, 0xd2, 0x55, 0xe1, 0x2f, 0xde, 0xaf, - 0x42, 0x58, 0x6d, 0x90, 0x96, 0xd3, 0x55, 0xef, 0x46, 0xaf, 0x7a, 0x9d, 0xc8, 0x6d, 0xce, 0xb8, - 0x5e, 0x14, 0x46, 0x41, 0xb2, 0x92, 0x7d, 0x13, 0x86, 0x66, 0x5b, 0x7e, 0xc7, 0x8b, 0xd0, 0xc7, - 0x21, 0xbf, 0xe7, 0x34, 0x3b, 0xa4, 0x64, 0x3d, 0x6d, 0x3d, 0x5f, 0x2c, 0x3f, 0xfb, 0x9d, 0xc3, - 0xa9, 0xc7, 0x8e, 0x0e, 0xa7, 0xf2, 0xb7, 0x29, 0xf0, 0xde, 0xe1, 0xd4, 0x45, 0xe2, 0x55, 0xfd, - 0x9a, 0xeb, 0xd5, 0x67, 0xde, 0x0a, 0x7d, 0x6f, 0x7a, 0xbd, 0xd3, 0xda, 0x26, 0x01, 0xe6, 0x75, - 0xec, 0x3f, 0xca, 0xc1, 0xf8, 0x6c, 0x50, 0x6d, 0xb8, 0x7b, 0xa4, 0x12, 0x51, 0xfa, 0xf5, 0x03, - 0xd4, 0x80, 0x81, 0xc8, 0x09, 0x18, 0xb9, 0x91, 0xeb, 0x6b, 0xd3, 0x0f, 0x3b, 0xf9, 0xd3, 0x5b, - 0x4e, 0x20, 0x69, 0x97, 0x87, 0x8f, 0x0e, 0xa7, 0x06, 0xb6, 0x9c, 0x00, 0x53, 0x16, 0xa8, 0x09, - 0x83, 0x9e, 0xef, 0x91, 0x52, 0x8e, 0xb1, 0x5a, 0x7f, 0x78, 0x56, 0xeb, 0xbe, 0xa7, 0xfa, 0x51, - 0x2e, 0x1c, 0x1d, 0x4e, 0x0d, 0x52, 0x08, 0x66, 0x5c, 0x68, 0xbf, 0xde, 0x71, 0xdb, 0xa5, 0x81, - 0xac, 0xfa, 0xf5, 0xba, 0xdb, 0x36, 0xfb, 0xf5, 0xba, 0xdb, 0xc6, 0x94, 0x85, 0xfd, 0x41, 0x0e, - 0x8a, 0xb3, 0x41, 0xbd, 0xd3, 0x22, 0x5e, 0x14, 0xa2, 0xcf, 0x02, 0xb4, 0x9d, 0xc0, 0x69, 0x91, - 0x88, 0x04, 0x61, 0xc9, 0x7a, 0x7a, 0xe0, 0xf9, 0x91, 0xeb, 0x2b, 0x0f, 0xcf, 0x7e, 0x53, 0xd2, - 0x2c, 0x23, 0x31, 0xe5, 0xa0, 0x40, 0x21, 0xd6, 0x58, 0xa2, 0x77, 0xa1, 0xe8, 0x04, 0x91, 0xbb, - 0xe3, 0x54, 0xa3, 0xb0, 0x94, 0x63, 0xfc, 0x5f, 0x79, 0x78, 0xfe, 0xb3, 0x82, 0x64, 0xf9, 
0xbc, - 0x60, 0x5f, 0x94, 0x90, 0x10, 0xc7, 0xfc, 0xec, 0xdf, 0xc9, 0x43, 0x41, 0x16, 0xa0, 0xa7, 0x61, - 0xd0, 0x73, 0x5a, 0x72, 0xa9, 0x8e, 0x8a, 0x8a, 0x83, 0xeb, 0x4e, 0x8b, 0x4e, 0x92, 0xd3, 0x22, - 0x14, 0xa3, 0xed, 0x44, 0x0d, 0xb6, 0x24, 0x34, 0x8c, 0x4d, 0x27, 0x6a, 0x60, 0x56, 0x82, 0x9e, - 0x84, 0xc1, 0x96, 0x5f, 0x23, 0x6c, 0x1e, 0xf3, 0x7c, 0x92, 0xd7, 0xfc, 0x1a, 0xc1, 0x0c, 0x4a, - 0xeb, 0xef, 0x04, 0x7e, 0xab, 0x34, 0x68, 0xd6, 0x5f, 0x08, 0xfc, 0x16, 0x66, 0x25, 0xe8, 0x6b, - 0x16, 0x4c, 0xc8, 0xe6, 0xad, 0xfa, 0x55, 0x27, 0x72, 0x7d, 0xaf, 0x94, 0x67, 0x8b, 0x02, 0x67, - 0x37, 0x2a, 0x92, 0x72, 0xb9, 0x24, 0x9a, 0x30, 0x91, 0x2c, 0xc1, 0x5d, 0xad, 0x40, 0xd7, 0x01, - 0xea, 0x4d, 0x7f, 0xdb, 0x69, 0xd2, 0x01, 0x29, 0x0d, 0xb1, 0x2e, 0xa8, 0xc9, 0x5d, 0x54, 0x25, - 0x58, 0xc3, 0x42, 0x77, 0x61, 0xd8, 0xe1, 0x1b, 0xb8, 0x34, 0xcc, 0x3a, 0xf1, 0x6a, 0x16, 0x9d, - 0x30, 0x24, 0x42, 0x79, 0xe4, 0xe8, 0x70, 0x6a, 0x58, 0x00, 0xb1, 0x64, 0x87, 0x5e, 0x80, 0x82, - 0xdf, 0xa6, 0xed, 0x76, 0x9a, 0xa5, 0xc2, 0xd3, 0xd6, 0xf3, 0x85, 0xf2, 0x84, 0x68, 0x6b, 0x61, - 0x43, 0xc0, 0xb1, 0xc2, 0x40, 0xd7, 0x60, 0x38, 0xec, 0x6c, 0xd3, 0x79, 0x2c, 0x15, 0x59, 0xc7, - 0xc6, 0x05, 0xf2, 0x70, 0x85, 0x83, 0xb1, 0x2c, 0x47, 0x2f, 0xc1, 0x48, 0x40, 0xaa, 0x9d, 0x20, - 0x24, 0x74, 0x62, 0x4b, 0xc0, 0x68, 0x5f, 0x10, 0xe8, 0x23, 0x38, 0x2e, 0xc2, 0x3a, 0x1e, 0xfa, - 0x04, 0x9c, 0xa3, 0x13, 0x7c, 0xf3, 0x6e, 0x3b, 0x20, 0x61, 0x48, 0x67, 0x75, 0x84, 0x31, 0xba, - 0x2c, 0x6a, 0x9e, 0x5b, 0x30, 0x4a, 0x71, 0x02, 0xdb, 0xfe, 0xfd, 0x61, 0xe8, 0x9a, 0x24, 0xf4, - 0x22, 0x8c, 0x88, 0xfe, 0xae, 0xfa, 0xf5, 0x90, 0x2d, 0xdc, 0x42, 0x79, 0x9c, 0xb6, 0x63, 0x36, - 0x06, 0x63, 0x1d, 0x07, 0xd5, 0x20, 0x17, 0xde, 0x10, 0x32, 0x6d, 0xf5, 0xe1, 0x27, 0xa3, 0x72, - 0x43, 0xed, 0xb4, 0xa1, 0xa3, 0xc3, 0xa9, 0x5c, 0xe5, 0x06, 0xce, 0x85, 0x37, 0xa8, 0x34, 0xab, - 0xbb, 0x51, 0x76, 0xd2, 0x6c, 0xd1, 0x8d, 0x14, 0x1f, 0x26, 0xcd, 0x16, 0xdd, 0x08, 0x53, 0x16, - 0x54, 0x4a, 0x37, 0xa2, 0xa8, 
0xcd, 0xb6, 0x54, 0x26, 0x52, 0x7a, 0x69, 0x6b, 0x6b, 0x53, 0xf1, - 0x62, 0x1b, 0x98, 0x42, 0x30, 0xe3, 0x82, 0xbe, 0x64, 0xd1, 0x11, 0xe7, 0x85, 0x7e, 0x70, 0x20, - 0x76, 0xe6, 0xad, 0xec, 0x76, 0xa6, 0x1f, 0x1c, 0x28, 0xe6, 0x62, 0x22, 0x55, 0x01, 0xd6, 0x59, - 0xb3, 0x8e, 0xd7, 0x76, 0x42, 0xb6, 0x11, 0xb3, 0xe9, 0xf8, 0xfc, 0x42, 0x25, 0xd1, 0xf1, 0xf9, - 0x85, 0x0a, 0x66, 0x5c, 0xe8, 0x84, 0x06, 0xce, 0xbe, 0xd8, 0xc4, 0x19, 0x4c, 0x28, 0x76, 0xf6, - 0xcd, 0x09, 0xc5, 0xce, 0x3e, 0xa6, 0x2c, 0x28, 0x27, 0x3f, 0x0c, 0xd9, 0x9e, 0xcd, 0x84, 0xd3, - 0x46, 0xa5, 0x62, 0x72, 0xda, 0xa8, 0x54, 0x30, 0x65, 0xc1, 0x16, 0x69, 0x35, 0x64, 0x1b, 0x3e, - 0x9b, 0x45, 0x3a, 0x97, 0xe0, 0xb4, 0x38, 0x57, 0xc1, 0x94, 0x85, 0xfd, 0x81, 0x05, 0x63, 0xb2, - 0x88, 0x0a, 0x91, 0x10, 0xdd, 0x85, 0x82, 0x9c, 0x4c, 0xa1, 0xcb, 0x64, 0x79, 0xe8, 0x29, 0x51, - 0x27, 0x21, 0x58, 0x71, 0xb3, 0x7f, 0x2f, 0x0f, 0x48, 0x81, 0x49, 0xdb, 0x0f, 0x5d, 0xb6, 0x9c, - 0x1e, 0x40, 0x94, 0x78, 0x9a, 0x28, 0xb9, 0x9d, 0xa5, 0x28, 0x89, 0x9b, 0x65, 0x08, 0x95, 0xbf, - 0x93, 0xd8, 0x7c, 0x5c, 0xba, 0xfc, 0xc2, 0xa9, 0x6c, 0x3e, 0xad, 0x09, 0xc7, 0x6f, 0xc3, 0x3d, - 0xb1, 0x0d, 0xb9, 0xfc, 0xf9, 0xab, 0xd9, 0x6e, 0x43, 0xad, 0x15, 0xc9, 0x0d, 0x19, 0xf0, 0x6d, - 0xc2, 0x05, 0xd0, 0x9d, 0x4c, 0xb7, 0x89, 0xc6, 0xd5, 0xdc, 0x30, 0x01, 0xdf, 0x30, 0x43, 0x59, - 0xf1, 0xd4, 0x36, 0x4c, 0x92, 0xa7, 0xda, 0x3a, 0x6f, 0xc3, 0xa5, 0x6e, 0x1c, 0x4c, 0x76, 0xd0, - 0x0c, 0x14, 0xab, 0xbe, 0xb7, 0xe3, 0xd6, 0xd7, 0x9c, 0xb6, 0x50, 0xd9, 0x94, 0xae, 0x37, 0x27, - 0x0b, 0x70, 0x8c, 0x83, 0x9e, 0x82, 0x81, 0x5d, 0x72, 0x20, 0x74, 0xb7, 0x11, 0x81, 0x3a, 0xb0, - 0x42, 0x0e, 0x30, 0x85, 0x7f, 0xac, 0xf0, 0xb5, 0xdf, 0x9a, 0x7a, 0xec, 0x73, 0xff, 0xf1, 0xe9, - 0xc7, 0xec, 0x7f, 0x37, 0x00, 0x4f, 0xa4, 0xf2, 0xac, 0x44, 0x4e, 0xd4, 0x09, 0xd1, 0xef, 0x59, - 0x70, 0xc9, 0x49, 0x2b, 0x17, 0x3b, 0xf9, 0x4e, 0x76, 0x2b, 0xd2, 0x20, 0x5f, 0x7e, 0x4a, 0x34, - 0x3a, 0x7d, 0x44, 0x70, 0x7a, 0xa3, 0xe8, 0x40, 0x51, 0xe5, 0x35, 
0x6c, 0x3b, 0x55, 0x22, 0x7a, - 0xaf, 0x06, 0x6a, 0x5d, 0x16, 0xe0, 0x18, 0x87, 0x2a, 0x43, 0x35, 0xb2, 0xe3, 0x74, 0x9a, 0xfc, - 0x00, 0x2f, 0xc4, 0xca, 0xd0, 0x3c, 0x07, 0x63, 0x59, 0x8e, 0xfe, 0xbe, 0x05, 0xa8, 0x9b, 0xab, - 0xd8, 0x0c, 0x5b, 0xa7, 0x31, 0x0e, 0xe5, 0xcb, 0x47, 0x87, 0x53, 0x29, 0x02, 0x0c, 0xa7, 0xb4, - 0x43, 0x9b, 0xd3, 0x7f, 0x6d, 0xc1, 0x85, 0x94, 0x6d, 0x4e, 0x17, 0x45, 0x27, 0x68, 0x8a, 0xf5, - 0xa3, 0x16, 0xc5, 0x2d, 0xbc, 0x8a, 0x29, 0x1c, 0xfd, 0x9a, 0x05, 0xe3, 0xda, 0x6e, 0x9f, 0xed, - 0x08, 0xe5, 0x3f, 0x23, 0x45, 0xd6, 0x20, 0x5c, 0xbe, 0x22, 0xd8, 0x8f, 0x27, 0x0a, 0x70, 0xb2, - 0x09, 0xf6, 0x0f, 0x2c, 0x78, 0xea, 0x58, 0xa1, 0x95, 0xda, 0x70, 0xeb, 0x91, 0x37, 0x9c, 0x2e, - 0xad, 0x80, 0xb4, 0xfd, 0x5b, 0x78, 0x55, 0xac, 0x44, 0xb5, 0xb4, 0x30, 0x07, 0x63, 0x59, 0x6e, - 0xff, 0xb1, 0x05, 0x49, 0x7a, 0xc8, 0x81, 0x73, 0x9d, 0x90, 0x04, 0x74, 0xa9, 0x56, 0x48, 0x35, - 0x20, 0xf2, 0xec, 0x7c, 0x76, 0x9a, 0x5b, 0x29, 0x68, 0x83, 0xa7, 0xab, 0x7e, 0x40, 0xa6, 0xf7, - 0x5e, 0x9c, 0xe6, 0x18, 0x2b, 0xe4, 0xa0, 0x42, 0x9a, 0x84, 0xd2, 0x28, 0x23, 0xaa, 0x67, 0xdf, - 0x32, 0x08, 0xe0, 0x04, 0x41, 0xca, 0xa2, 0xed, 0x84, 0xe1, 0xbe, 0x1f, 0xd4, 0x04, 0x8b, 0xdc, - 0x89, 0x59, 0x6c, 0x1a, 0x04, 0x70, 0x82, 0xa0, 0xfd, 0x2f, 0x2c, 0x18, 0x2e, 0x3b, 0xd5, 0x5d, - 0x7f, 0x67, 0x87, 0x5e, 0x53, 0x6a, 0x9d, 0x80, 0x5f, 0xf3, 0xf8, 0x22, 0x54, 0x67, 0xf7, 0xbc, - 0x80, 0x63, 0x85, 0x81, 0xb6, 0x60, 0x88, 0x0f, 0x87, 0x68, 0xd4, 0xcf, 0x69, 0x8d, 0x52, 0xd6, - 0x19, 0x36, 0x73, 0x9d, 0xc8, 0x6d, 0x4e, 0x73, 0xeb, 0xcc, 0xf4, 0xb2, 0x17, 0x6d, 0x04, 0x95, - 0x28, 0x70, 0xbd, 0x7a, 0x19, 0x8e, 0x0e, 0xa7, 0x86, 0x16, 0x18, 0x0d, 0x2c, 0x68, 0xd1, 0x1b, - 0x4d, 0xcb, 0xb9, 0x2b, 0xd9, 0xb1, 0x3d, 0x5f, 0x8c, 0x6f, 0x34, 0x6b, 0x71, 0x11, 0xd6, 0xf1, - 0xec, 0x37, 0x21, 0x3f, 0xe7, 0x54, 0x1b, 0x04, 0xdd, 0x4a, 0x4a, 0xe2, 0x91, 0xeb, 0xcf, 0xa7, - 0x8d, 0x96, 0x92, 0xca, 0xfa, 0x80, 0x8d, 0xf5, 0x92, 0xd7, 0xf6, 0x8f, 0x2c, 0xb8, 0x32, 0xd7, - 0xec, 
0x84, 0x11, 0x09, 0xee, 0x88, 0x25, 0xb8, 0x45, 0x5a, 0xed, 0xa6, 0x13, 0x11, 0xf4, 0x69, - 0x28, 0xb4, 0x48, 0xe4, 0xd4, 0x9c, 0xc8, 0x11, 0x1c, 0x7b, 0x0f, 0x05, 0x5b, 0xc4, 0x14, 0x9b, - 0xb6, 0x61, 0x63, 0xfb, 0x2d, 0x52, 0x8d, 0xd6, 0x48, 0xe4, 0xc4, 0x77, 0xd7, 0x18, 0x86, 0x15, - 0x55, 0xd4, 0x86, 0xc1, 0xb0, 0x4d, 0xaa, 0xd9, 0x59, 0x7f, 0x64, 0x1f, 0x2a, 0x6d, 0x52, 0x8d, - 0xaf, 0xfe, 0xf4, 0x1f, 0x66, 0x9c, 0xec, 0xff, 0x63, 0xc1, 0x13, 0x3d, 0xfa, 0xbb, 0xea, 0x86, - 0x11, 0x7a, 0xa3, 0xab, 0xcf, 0xd3, 0xfd, 0xf5, 0x99, 0xd6, 0x66, 0x3d, 0x56, 0x4b, 0x4b, 0x42, - 0xb4, 0xfe, 0x7e, 0x06, 0xf2, 0x6e, 0x44, 0x5a, 0xd2, 0x04, 0xf3, 0xda, 0xc3, 0x77, 0xb8, 0x47, - 0x5f, 0xca, 0x63, 0xd2, 0x06, 0xb8, 0x4c, 0xf9, 0x61, 0xce, 0xd6, 0xfe, 0x57, 0x16, 0xd0, 0x65, - 0x50, 0x73, 0xc5, 0xc5, 0x76, 0x30, 0x3a, 0x68, 0x4b, 0x53, 0x8c, 0x3c, 0xf7, 0x06, 0xb7, 0x0e, - 0xda, 0xe4, 0xde, 0xe1, 0xd4, 0x98, 0x42, 0xa4, 0x00, 0xcc, 0x50, 0xd1, 0x9b, 0x30, 0x14, 0xb2, - 0xf3, 0x59, 0x48, 0x96, 0x05, 0x51, 0x69, 0x88, 0x9f, 0xda, 0xf7, 0x0e, 0xa7, 0xfa, 0xb2, 0xb4, - 0x4e, 0x2b, 0xda, 0xbc, 0x1e, 0x16, 0x54, 0xa9, 0xe8, 0x6a, 0x91, 0x30, 0x74, 0xea, 0x44, 0xec, - 0x10, 0x25, 0xba, 0xd6, 0x38, 0x18, 0xcb, 0x72, 0xfb, 0xd7, 0x2d, 0xa0, 0x4d, 0x8c, 0x1c, 0xca, - 0x62, 0x9d, 0xde, 0xfe, 0xd7, 0xd9, 0x16, 0xe1, 0x00, 0x31, 0x79, 0x4f, 0xf5, 0xd8, 0x22, 0x1c, - 0xc9, 0xd0, 0x65, 0x38, 0x08, 0xc7, 0x24, 0xd0, 0x47, 0x61, 0xb4, 0x46, 0xda, 0xc4, 0xab, 0x11, - 0xaf, 0xea, 0x12, 0x3e, 0x69, 0xc5, 0xf2, 0xc4, 0xd1, 0xe1, 0xd4, 0xe8, 0xbc, 0x06, 0xc7, 0x06, - 0x96, 0xfd, 0x0d, 0x0b, 0x1e, 0x57, 0xe4, 0x2a, 0x24, 0xc2, 0x24, 0x0a, 0x0e, 0x94, 0x65, 0xf5, - 0x64, 0xa2, 0xe8, 0x0e, 0x95, 0xe4, 0x51, 0xc0, 0x99, 0x3f, 0x98, 0x2c, 0x1a, 0xe1, 0x72, 0x9f, - 0x11, 0xc1, 0x92, 0x9a, 0xfd, 0xab, 0x03, 0x70, 0x51, 0x6f, 0xa4, 0xda, 0xf3, 0xbf, 0x68, 0x01, - 0xa8, 0x11, 0xa0, 0x0a, 0x37, 0x5d, 0xa7, 0x1b, 0x19, 0xac, 0x53, 0x7d, 0xa6, 0x62, 0xa9, 0xa0, - 0xc0, 0x21, 0xd6, 0xd8, 0xa2, 0xd7, 0x60, 
0x74, 0xcf, 0x6f, 0x76, 0x5a, 0x64, 0xcd, 0xef, 0x78, - 0x51, 0x58, 0x1a, 0x60, 0xcd, 0x98, 0x4a, 0x9b, 0xcc, 0xdb, 0x31, 0x5e, 0xf9, 0xa2, 0x20, 0x3b, - 0xaa, 0x01, 0x43, 0x6c, 0x90, 0xa2, 0x67, 0xf6, 0x58, 0xa0, 0x4f, 0x89, 0xd0, 0xee, 0x3f, 0x95, - 0x61, 0x1f, 0x93, 0xb3, 0x5e, 0x3e, 0x7f, 0x74, 0x38, 0x35, 0x66, 0x80, 0xb0, 0xd9, 0x08, 0xfb, - 0x35, 0x60, 0x63, 0xe1, 0x7a, 0x1d, 0xb2, 0xe1, 0xa1, 0x67, 0x20, 0x4f, 0x82, 0xc0, 0x0f, 0xc4, - 0x0d, 0x51, 0x6d, 0xe6, 0x9b, 0x14, 0x88, 0x79, 0x19, 0x7a, 0x8e, 0x9e, 0x53, 0x6e, 0x93, 0xd4, - 0xd8, 0xda, 0x28, 0x94, 0xcf, 0xc9, 0xbd, 0xb8, 0xc0, 0xa0, 0x58, 0x94, 0xda, 0xd3, 0x30, 0x3c, - 0x47, 0xfb, 0x4e, 0x02, 0x4a, 0x57, 0x77, 0x14, 0x8c, 0x19, 0x8e, 0x02, 0xe9, 0x10, 0xd8, 0x82, - 0x4b, 0x73, 0x01, 0x71, 0x22, 0x52, 0xb9, 0x51, 0xee, 0x54, 0x77, 0x49, 0xc4, 0x4d, 0x79, 0x21, - 0xfa, 0x38, 0x8c, 0xf9, 0x4c, 0x8a, 0xaf, 0xfa, 0xd5, 0x5d, 0xd7, 0xab, 0x0b, 0xc5, 0xf5, 0x92, - 0xa0, 0x32, 0xb6, 0xa1, 0x17, 0x62, 0x13, 0xd7, 0xfe, 0xaf, 0x39, 0x18, 0x9d, 0x0b, 0x7c, 0x4f, - 0x4a, 0xaa, 0x33, 0x38, 0x5d, 0x22, 0xe3, 0x74, 0xc9, 0xc0, 0xb2, 0xab, 0xb7, 0xbf, 0xd7, 0x09, - 0x83, 0xde, 0x53, 0x22, 0x72, 0x20, 0x2b, 0x05, 0xdd, 0xe0, 0xcb, 0x68, 0xc7, 0x93, 0x6d, 0x0a, - 0x50, 0xfb, 0x4f, 0x2c, 0x98, 0xd0, 0xd1, 0xcf, 0xe0, 0x50, 0x0b, 0xcd, 0x43, 0x6d, 0x3d, 0xdb, - 0xfe, 0xf6, 0x38, 0xc9, 0x3e, 0x18, 0x32, 0xfb, 0x49, 0x27, 0x00, 0x7d, 0xcd, 0x82, 0xd1, 0x7d, - 0x0d, 0x20, 0x3a, 0x9b, 0xb5, 0x5e, 0xf1, 0xd3, 0x52, 0xcc, 0xe8, 0xd0, 0x7b, 0x89, 0xff, 0xd8, - 0x68, 0x09, 0x95, 0xfb, 0x61, 0xb5, 0x41, 0x6a, 0x9d, 0xa6, 0xbc, 0x1e, 0xaa, 0x21, 0xad, 0x08, - 0x38, 0x56, 0x18, 0xe8, 0x0d, 0x38, 0x5f, 0xf5, 0xbd, 0x6a, 0x27, 0x08, 0x88, 0x57, 0x3d, 0xd8, - 0x64, 0xbe, 0x4d, 0x71, 0x20, 0x4e, 0x8b, 0x6a, 0xe7, 0xe7, 0x92, 0x08, 0xf7, 0xd2, 0x80, 0xb8, - 0x9b, 0x10, 0xb7, 0xc3, 0x87, 0xf4, 0xc8, 0x62, 0x77, 0xc8, 0x82, 0x6e, 0x87, 0x67, 0x60, 0x2c, - 0xcb, 0xd1, 0x2d, 0xb8, 0x12, 0x46, 0xf4, 0x7e, 0xe1, 0xd5, 0xe7, 0x89, 0x53, 
0x6b, 0xba, 0x1e, - 0x55, 0xe1, 0x7d, 0xaf, 0xc6, 0x8d, 0x22, 0x03, 0xe5, 0x27, 0x8e, 0x0e, 0xa7, 0xae, 0x54, 0xd2, - 0x51, 0x70, 0xaf, 0xba, 0xe8, 0x4d, 0x98, 0x0c, 0x3b, 0xd5, 0x2a, 0x09, 0xc3, 0x9d, 0x4e, 0xf3, - 0x15, 0x7f, 0x3b, 0x5c, 0x72, 0x43, 0x7a, 0xff, 0x58, 0x75, 0x5b, 0x6e, 0xc4, 0x4c, 0x1f, 0xf9, - 0xf2, 0xd5, 0xa3, 0xc3, 0xa9, 0xc9, 0x4a, 0x4f, 0x2c, 0x7c, 0x0c, 0x05, 0x84, 0xe1, 0x32, 0x17, - 0x7e, 0x5d, 0xb4, 0x87, 0x19, 0xed, 0xc9, 0xa3, 0xc3, 0xa9, 0xcb, 0x0b, 0xa9, 0x18, 0xb8, 0x47, - 0x4d, 0x3a, 0x83, 0x91, 0xdb, 0x22, 0xef, 0xf8, 0x1e, 0x61, 0x76, 0x53, 0x6d, 0x06, 0xb7, 0x04, - 0x1c, 0x2b, 0x0c, 0xf4, 0x56, 0xbc, 0x12, 0xe9, 0x76, 0x11, 0xf6, 0xcf, 0x93, 0x4b, 0xb8, 0x8b, - 0x47, 0x87, 0x53, 0x13, 0x77, 0x34, 0x4a, 0x74, 0xcb, 0x61, 0x83, 0xb6, 0xfd, 0x47, 0x39, 0x40, - 0xdd, 0x22, 0x02, 0xad, 0xc0, 0x90, 0x53, 0x8d, 0xdc, 0x3d, 0x22, 0x1c, 0x8e, 0xcf, 0xa4, 0x1d, - 0x9f, 0x9c, 0x15, 0x26, 0x3b, 0x84, 0xae, 0x10, 0x12, 0xcb, 0x95, 0x59, 0x56, 0x15, 0x0b, 0x12, - 0xc8, 0x87, 0xf3, 0x4d, 0x27, 0x8c, 0xe4, 0x5a, 0xad, 0xd1, 0x2e, 0x0b, 0xc1, 0xfa, 0x33, 0xfd, - 0x75, 0x8a, 0xd6, 0x28, 0x5f, 0xa2, 0x2b, 0x77, 0x35, 0x49, 0x08, 0x77, 0xd3, 0x46, 0x9f, 0x65, - 0x7a, 0x08, 0x57, 0x12, 0xa5, 0x02, 0xb0, 0x92, 0xc9, 0x19, 0xcd, 0x69, 0x1a, 0x3a, 0x88, 0x60, - 0x83, 0x35, 0x96, 0xf6, 0xbf, 0x01, 0x18, 0x9e, 0x9f, 0x5d, 0xdc, 0x72, 0xc2, 0xdd, 0x3e, 0x9c, - 0x96, 0x74, 0x75, 0x08, 0x1d, 0x2a, 0xb9, 0xbf, 0xa5, 0x6e, 0x85, 0x15, 0x06, 0xf2, 0x60, 0xc8, - 0xf5, 0xe8, 0x86, 0x28, 0x9d, 0xcb, 0xca, 0x2c, 0xad, 0x34, 0x7f, 0x76, 0xf9, 0x5c, 0x66, 0xd4, - 0xb1, 0xe0, 0x82, 0xde, 0x83, 0xa2, 0x23, 0x9d, 0xd1, 0xe2, 0x58, 0x5a, 0xc9, 0xc2, 0x42, 0x21, - 0x48, 0xea, 0xfe, 0x5f, 0x01, 0xc2, 0x31, 0x43, 0xf4, 0x39, 0x0b, 0x46, 0x64, 0xd7, 0x31, 0xd9, - 0x11, 0x86, 0xab, 0xb5, 0xec, 0xfa, 0x8c, 0xc9, 0x0e, 0x37, 0x20, 0x6b, 0x00, 0xac, 0xb3, 0xec, - 0x52, 0xe5, 0xf3, 0xfd, 0xa8, 0xf2, 0x68, 0x1f, 0x8a, 0xfb, 0x6e, 0xd4, 0x60, 0x07, 0x4f, 0x69, - 0x88, 0x2d, 0xc1, 
0x85, 0x87, 0x6f, 0x35, 0x25, 0x17, 0x8f, 0xd8, 0x1d, 0xc9, 0x00, 0xc7, 0xbc, - 0xd0, 0x0c, 0x67, 0xcc, 0x9c, 0xf9, 0x4c, 0x64, 0x15, 0xcd, 0x0a, 0xac, 0x00, 0xc7, 0x38, 0x74, - 0x88, 0x47, 0xe9, 0xbf, 0x0a, 0x79, 0xbb, 0x43, 0xf7, 0xb1, 0xf0, 0xec, 0x64, 0xb0, 0xae, 0x24, - 0x45, 0x3e, 0x58, 0x77, 0x34, 0x1e, 0xd8, 0xe0, 0x48, 0xf7, 0xc8, 0x7e, 0x83, 0x78, 0xc2, 0xb5, - 0xab, 0xf6, 0xc8, 0x9d, 0x06, 0xf1, 0x30, 0x2b, 0x41, 0xef, 0xf1, 0xab, 0x05, 0xd7, 0x71, 0x99, - 0x4f, 0x37, 0x13, 0xef, 0x68, 0xac, 0x37, 0x97, 0xcf, 0xc9, 0x3b, 0x05, 0xff, 0x8f, 0x35, 0x7e, - 0x54, 0x5d, 0xf6, 0xbd, 0x9b, 0x77, 0xdd, 0x48, 0xf8, 0x84, 0x95, 0xa4, 0xdb, 0x60, 0x50, 0x2c, - 0x4a, 0xb9, 0x61, 0x96, 0x2e, 0x82, 0xb0, 0x34, 0x6a, 0x5e, 0x41, 0xf9, 0x4a, 0x09, 0xb1, 0x2c, - 0x47, 0xff, 0xc0, 0x82, 0x7c, 0xc3, 0xf7, 0x77, 0xc3, 0xd2, 0x18, 0x5b, 0x1c, 0x19, 0xa8, 0x7a, - 0x42, 0xe2, 0x4c, 0x2f, 0x51, 0xb2, 0x37, 0xbd, 0x28, 0x38, 0x28, 0xbf, 0x28, 0x15, 0x20, 0x06, - 0xbb, 0x77, 0x38, 0x75, 0x6e, 0xd5, 0xdd, 0x21, 0xd5, 0x83, 0x6a, 0x93, 0x30, 0xc8, 0xe7, 0xbf, - 0xaf, 0x41, 0x6e, 0xee, 0x11, 0x2f, 0xc2, 0xbc, 0x55, 0x93, 0x1f, 0x58, 0x00, 0x31, 0x21, 0x34, - 0xc1, 0x6d, 0xf3, 0x4c, 0x88, 0x31, 0x73, 0x3c, 0x22, 0xf2, 0x3e, 0xc0, 0x25, 0x79, 0x06, 0xf7, - 0x3c, 0xa3, 0x69, 0xe2, 0x46, 0xf1, 0xb1, 0xdc, 0xcb, 0x96, 0xfd, 0x6f, 0x2d, 0x18, 0xa1, 0x9d, - 0x93, 0x22, 0xf0, 0x39, 0x18, 0x8a, 0x9c, 0xa0, 0x2e, 0xac, 0x8b, 0xda, 0x74, 0x6c, 0x31, 0x28, - 0x16, 0xa5, 0xc8, 0x83, 0x7c, 0xe4, 0x84, 0xbb, 0x52, 0xbb, 0x5c, 0xce, 0x6c, 0x88, 0x63, 0xc5, - 0x92, 0xfe, 0x0b, 0x31, 0x67, 0x83, 0x9e, 0x87, 0x02, 0x55, 0x00, 0x16, 0x9c, 0x50, 0x1a, 0xe6, - 0x47, 0xa9, 0x10, 0x5f, 0x10, 0x30, 0xac, 0x4a, 0xed, 0xbf, 0x9b, 0x83, 0xc1, 0x79, 0x7e, 0xcf, - 0x18, 0x0a, 0xfd, 0x4e, 0x50, 0x25, 0x42, 0xdf, 0xcc, 0x60, 0x4d, 0x53, 0xba, 0x15, 0x46, 0x53, - 0xd3, 0xf4, 0xd9, 0x7f, 0x2c, 0x78, 0xd1, 0x8b, 0xec, 0xb9, 0x28, 0x70, 0xbc, 0x70, 0xc7, 0x0f, - 0x5a, 0xdc, 0xa0, 0x90, 0xcb, 0x6a, 0x15, 0x6e, 0x19, 
0x74, 0x2b, 0x11, 0x69, 0xc7, 0x21, 0x14, - 0x66, 0x19, 0x4e, 0xb4, 0xc1, 0xfe, 0x0d, 0x0b, 0x20, 0x6e, 0x3d, 0xfa, 0x92, 0x05, 0x63, 0x8e, - 0xee, 0x94, 0x15, 0x63, 0xb4, 0x91, 0x9d, 0x81, 0x9c, 0x91, 0xe5, 0x57, 0x6c, 0x03, 0x84, 0x4d, - 0xc6, 0xf6, 0x4b, 0x90, 0x67, 0xbb, 0x83, 0xe9, 0xe2, 0xc2, 0x32, 0x9a, 0xb4, 0xc1, 0x48, 0x8b, - 0x29, 0x56, 0x18, 0xf6, 0x1b, 0x70, 0xee, 0xe6, 0x5d, 0x52, 0xed, 0x44, 0x7e, 0xc0, 0x2d, 0xa8, - 0xe8, 0x15, 0x40, 0x21, 0x09, 0xf6, 0xdc, 0x2a, 0x99, 0xad, 0x56, 0xe9, 0xcd, 0x7a, 0x3d, 0xd6, - 0x0d, 0x26, 0x05, 0x25, 0x54, 0xe9, 0xc2, 0xc0, 0x29, 0xb5, 0xec, 0xdf, 0xb5, 0x60, 0x44, 0xf3, - 0xd0, 0xd1, 0x93, 0xba, 0x3e, 0x57, 0xe1, 0xf7, 0x6e, 0x31, 0x54, 0x2b, 0x99, 0xf8, 0x00, 0x39, - 0xc9, 0xf8, 0x18, 0x51, 0x20, 0x1c, 0x33, 0xbc, 0x8f, 0xf7, 0xce, 0xfe, 0x97, 0x16, 0x5c, 0x4a, - 0x75, 0x27, 0x3e, 0xe2, 0x66, 0xcf, 0x40, 0x71, 0x97, 0x1c, 0x2c, 0xb0, 0x35, 0x98, 0x74, 0xbe, - 0xad, 0xc8, 0x02, 0x1c, 0xe3, 0xd8, 0xdf, 0xb6, 0x20, 0xa6, 0x44, 0x45, 0xd1, 0x76, 0xdc, 0x72, - 0x4d, 0x14, 0x09, 0x4e, 0xa2, 0x14, 0xbd, 0x07, 0x57, 0xcc, 0x19, 0x64, 0x26, 0xf6, 0x93, 0xbb, - 0x2f, 0xf8, 0x9d, 0x29, 0x9d, 0x12, 0xee, 0xc5, 0xc2, 0xbe, 0x0d, 0xf9, 0x45, 0xa7, 0x53, 0x27, - 0x7d, 0x19, 0x71, 0xa8, 0x18, 0x0b, 0x88, 0xd3, 0x8c, 0xa4, 0x9a, 0x2e, 0xc4, 0x18, 0x16, 0x30, - 0xac, 0x4a, 0xed, 0x1f, 0x0f, 0xc2, 0x88, 0x16, 0xf9, 0x43, 0xcf, 0xf1, 0x80, 0xb4, 0xfd, 0xa4, - 0xae, 0x4b, 0x27, 0x1b, 0xb3, 0x12, 0xba, 0x7f, 0x02, 0xb2, 0xe7, 0x86, 0x5c, 0xe4, 0x18, 0xfb, - 0x07, 0x0b, 0x38, 0x56, 0x18, 0x68, 0x0a, 0xf2, 0x35, 0xd2, 0x8e, 0x1a, 0x4c, 0x9a, 0x0e, 0x96, - 0x8b, 0xb4, 0xa9, 0xf3, 0x14, 0x80, 0x39, 0x9c, 0x22, 0xec, 0x90, 0xa8, 0xda, 0x60, 0xc6, 0xc6, - 0x22, 0x47, 0x58, 0xa0, 0x00, 0xcc, 0xe1, 0x29, 0x0e, 0xa9, 0xfc, 0xe9, 0x3b, 0xa4, 0x86, 0x32, - 0x76, 0x48, 0xa1, 0x36, 0x5c, 0x08, 0xc3, 0xc6, 0x66, 0xe0, 0xee, 0x39, 0x11, 0x89, 0x57, 0xce, - 0xf0, 0x49, 0xf8, 0x5c, 0x39, 0x3a, 0x9c, 0xba, 0x50, 0xa9, 0x2c, 0x25, 0xa9, 0xe0, 0x34, 
0xd2, - 0xa8, 0x02, 0x97, 0x5c, 0x2f, 0x24, 0xd5, 0x4e, 0x40, 0x96, 0xeb, 0x9e, 0x1f, 0x90, 0x25, 0x3f, - 0xa4, 0xe4, 0x44, 0xa8, 0x9e, 0x72, 0x74, 0x2f, 0xa7, 0x21, 0xe1, 0xf4, 0xba, 0x68, 0x11, 0xce, - 0xd7, 0xdc, 0xd0, 0xd9, 0x6e, 0x92, 0x4a, 0x67, 0xbb, 0xe5, 0xd3, 0x0b, 0x1b, 0x8f, 0xee, 0x29, - 0x94, 0x1f, 0x97, 0xa6, 0x89, 0xf9, 0x24, 0x02, 0xee, 0xae, 0x63, 0x7f, 0xcf, 0x82, 0x51, 0x3d, - 0x0c, 0x83, 0xea, 0xb0, 0xd0, 0x98, 0x5f, 0xa8, 0x70, 0x29, 0x9b, 0xdd, 0x59, 0xba, 0xa4, 0x68, - 0xc6, 0x77, 0xbe, 0x18, 0x86, 0x35, 0x9e, 0x7d, 0x84, 0x9e, 0x3e, 0x03, 0xf9, 0x1d, 0x9f, 0x1e, - 0xf5, 0x03, 0xa6, 0x65, 0x76, 0x81, 0x02, 0x31, 0x2f, 0xb3, 0xff, 0xa7, 0x05, 0x97, 0xd3, 0x23, - 0x4c, 0x3e, 0x0c, 0x9d, 0xbc, 0x0e, 0x40, 0xbb, 0x62, 0x88, 0x4b, 0x2d, 0x7e, 0x58, 0x96, 0x60, - 0x0d, 0xab, 0xbf, 0x6e, 0xff, 0x84, 0xaa, 0x9b, 0x31, 0x9f, 0x2f, 0x5b, 0x30, 0x46, 0xd9, 0xae, - 0x04, 0xdb, 0x46, 0x6f, 0x37, 0xb2, 0xe9, 0xad, 0x22, 0x1b, 0x1b, 0xa0, 0x0d, 0x30, 0x36, 0x99, - 0xa3, 0x9f, 0x85, 0xa2, 0x53, 0xab, 0x05, 0x24, 0x0c, 0x95, 0x2b, 0x87, 0xb9, 0x45, 0x67, 0x25, - 0x10, 0xc7, 0xe5, 0x54, 0xc4, 0x35, 0x6a, 0x3b, 0x21, 0x95, 0x1a, 0xc2, 0xee, 0xa6, 0x44, 0x1c, - 0x65, 0x42, 0xe1, 0x58, 0x61, 0xd8, 0x7f, 0x73, 0x10, 0x4c, 0xde, 0xa8, 0x06, 0xe3, 0xbb, 0xc1, - 0xf6, 0x1c, 0x73, 0xdd, 0x3e, 0x88, 0x13, 0xfd, 0xc2, 0xd1, 0xe1, 0xd4, 0xf8, 0x8a, 0x49, 0x01, - 0x27, 0x49, 0x0a, 0x2e, 0x2b, 0xe4, 0x20, 0x72, 0xb6, 0x1f, 0xe4, 0x20, 0x92, 0x5c, 0x74, 0x0a, - 0x38, 0x49, 0x12, 0xbd, 0x04, 0x23, 0xbb, 0xc1, 0xb6, 0x14, 0xa0, 0x49, 0xcf, 0xf5, 0x4a, 0x5c, - 0x84, 0x75, 0x3c, 0x3a, 0x84, 0xbb, 0xc1, 0x36, 0x3d, 0x70, 0x64, 0x28, 0xb6, 0x1a, 0xc2, 0x15, - 0x01, 0xc7, 0x0a, 0x03, 0xb5, 0x01, 0xed, 0xca, 0xd1, 0x53, 0x8e, 0x6a, 0x21, 0xe7, 0xfb, 0xf7, - 0x73, 0xb3, 0xb0, 0x95, 0x95, 0x2e, 0x3a, 0x38, 0x85, 0x36, 0x7a, 0x0d, 0xae, 0xec, 0x06, 0xdb, - 0xe2, 0x18, 0xde, 0x0c, 0x5c, 0xaf, 0xea, 0xb6, 0x8d, 0xb0, 0xeb, 0x29, 0xd1, 0xdc, 0x2b, 0x2b, - 0xe9, 0x68, 0xb8, 0x57, 0x7d, 
0xfb, 0x9f, 0x0f, 0x00, 0x8b, 0x67, 0xa5, 0x9a, 0x45, 0x8b, 0x44, - 0x0d, 0xbf, 0x96, 0xd4, 0x2c, 0xd6, 0x18, 0x14, 0x8b, 0x52, 0x19, 0x20, 0x93, 0xeb, 0x11, 0x20, - 0xb3, 0x0f, 0xc3, 0x0d, 0xe2, 0xd4, 0x48, 0x20, 0x0d, 0x61, 0xab, 0xd9, 0x44, 0xe0, 0x2e, 0x31, - 0xa2, 0xf1, 0x05, 0x97, 0xff, 0x0f, 0xb1, 0xe4, 0x86, 0x3e, 0x06, 0xe7, 0xa8, 0x8e, 0xe0, 0x77, - 0x22, 0x69, 0xf5, 0x1d, 0x64, 0x56, 0x5f, 0x76, 0xde, 0x6d, 0x19, 0x25, 0x38, 0x81, 0x89, 0xe6, - 0x61, 0x42, 0x58, 0x68, 0x95, 0x81, 0x4d, 0x0c, 0xac, 0x8a, 0x87, 0xaf, 0x24, 0xca, 0x71, 0x57, - 0x0d, 0x2a, 0x91, 0xb7, 0xfd, 0x1a, 0x77, 0xd2, 0x69, 0x12, 0xb9, 0xec, 0xd7, 0x0e, 0x30, 0x2b, - 0xa1, 0xda, 0xb8, 0x3c, 0xa9, 0x2a, 0xbb, 0x6e, 0xfb, 0x36, 0x09, 0xdc, 0x9d, 0x03, 0x76, 0xac, - 0x16, 0x62, 0x6d, 0x7c, 0xb9, 0x0b, 0x03, 0xa7, 0xd4, 0xb2, 0xbf, 0x41, 0xcf, 0x24, 0x2d, 0x34, - 0xf9, 0x7e, 0x91, 0x4b, 0x61, 0x3c, 0x31, 0xfc, 0xee, 0xb5, 0x94, 0xc1, 0xc4, 0xdc, 0x67, 0x52, - 0xec, 0xef, 0x52, 0x31, 0xab, 0x66, 0xaf, 0x0f, 0xdb, 0xe4, 0x33, 0xfa, 0x2d, 0xbf, 0x97, 0xc2, - 0xf8, 0x59, 0x28, 0xb2, 0x1f, 0x0b, 0x81, 0xdf, 0x12, 0x26, 0x42, 0x9c, 0xe5, 0x2a, 0x13, 0xb7, - 0x59, 0x26, 0x72, 0x6f, 0x4b, 0x46, 0x38, 0xe6, 0x69, 0xfb, 0x30, 0x91, 0xc4, 0x46, 0x9f, 0x82, - 0xd1, 0x50, 0x4a, 0xad, 0x38, 0xf4, 0xaf, 0x4f, 0xe9, 0xc6, 0x0c, 0x56, 0x15, 0xad, 0x3a, 0x36, - 0x88, 0xd9, 0x1b, 0x30, 0x94, 0xe9, 0x10, 0xda, 0xdf, 0xb4, 0xa0, 0xc8, 0x5c, 0x06, 0xf5, 0xc0, - 0x69, 0xc5, 0x55, 0x06, 0x8e, 0x19, 0xf5, 0x10, 0x86, 0xf9, 0xe5, 0x42, 0xba, 0xda, 0x33, 0x58, - 0x40, 0xfc, 0x51, 0x58, 0xbc, 0x80, 0xf8, 0x2d, 0x26, 0xc4, 0x92, 0x93, 0xfd, 0x85, 0x1c, 0x0c, - 0x2d, 0x7b, 0xed, 0xce, 0x9f, 0xfb, 0x87, 0x49, 0x6b, 0x30, 0xb8, 0x1c, 0x91, 0x96, 0xf9, 0x7e, - 0x6e, 0xb4, 0xfc, 0xac, 0xfe, 0x76, 0xae, 0x64, 0xbe, 0x9d, 0xc3, 0xce, 0xbe, 0x8c, 0x44, 0x11, - 0xc6, 0xad, 0x38, 0xfc, 0xf1, 0x05, 0x28, 0xae, 0x3a, 0xdb, 0xa4, 0xb9, 0x42, 0x0e, 0x42, 0x7a, - 0xab, 0xe1, 0x5e, 0x51, 0x2b, 0xbe, 0xd5, 0x18, 0x1e, 0xcc, 0x79, 
0x38, 0xc7, 0xb0, 0xd5, 0x66, - 0xa0, 0x8a, 0x19, 0x89, 0x5f, 0xae, 0x58, 0xa6, 0x62, 0xa6, 0xbd, 0x5a, 0xd1, 0xb0, 0xec, 0x69, - 0x18, 0x89, 0xa9, 0xf4, 0xc1, 0xf5, 0x4f, 0x73, 0x30, 0x66, 0xd8, 0xe8, 0x0c, 0xcf, 0x85, 0x75, - 0x5f, 0xcf, 0x85, 0xe1, 0x49, 0xc8, 0x3d, 0x6a, 0x4f, 0xc2, 0xc0, 0xd9, 0x7b, 0x12, 0xcc, 0x49, - 0x1a, 0xec, 0x6b, 0x92, 0x9a, 0x30, 0xb8, 0xea, 0x7a, 0xbb, 0xfd, 0xc9, 0x99, 0xb0, 0xea, 0xb7, - 0xbb, 0xe4, 0x4c, 0x85, 0x02, 0x31, 0x2f, 0x93, 0x87, 0xd2, 0x40, 0xfa, 0xa1, 0x64, 0x7f, 0xde, - 0x82, 0xf3, 0x6b, 0xa4, 0xe5, 0xbb, 0xef, 0x38, 0x71, 0x84, 0x15, 0xad, 0xd4, 0x70, 0x23, 0x11, - 0x50, 0xa2, 0x2a, 0x2d, 0xb9, 0x11, 0xa6, 0xf0, 0xfb, 0x58, 0x7e, 0x58, 0x1c, 0x38, 0x55, 0x3c, - 0xd7, 0x63, 0x0d, 0x30, 0x8e, 0x9d, 0x92, 0x05, 0x38, 0xc6, 0xb1, 0x7f, 0xdf, 0x82, 0x61, 0xde, - 0x08, 0x22, 0x69, 0x5b, 0x3d, 0x68, 0x37, 0x20, 0xcf, 0xea, 0x89, 0xe5, 0xb4, 0x98, 0x81, 0x47, - 0x80, 0x92, 0xe3, 0x8b, 0x9f, 0xfd, 0xc4, 0x9c, 0x01, 0x53, 0xc7, 0x9c, 0xbb, 0xb3, 0x2a, 0xb8, - 0x2c, 0x56, 0xc7, 0x18, 0x14, 0x8b, 0x52, 0xfb, 0xeb, 0x03, 0x50, 0x90, 0xbe, 0x56, 0xfe, 0x20, - 0xc2, 0xf3, 0xfc, 0xc8, 0xe1, 0xae, 0x48, 0x2e, 0x24, 0x33, 0x08, 0x17, 0x92, 0x1c, 0xa6, 0x67, - 0x63, 0xea, 0xdc, 0xe2, 0xaf, 0x94, 0x6b, 0xad, 0x04, 0xeb, 0x8d, 0x40, 0x9f, 0x81, 0xa1, 0x26, - 0xdd, 0xf6, 0x52, 0x66, 0xde, 0xce, 0xb0, 0x39, 0x4c, 0x9e, 0x88, 0x96, 0xa8, 0x11, 0xe2, 0x40, - 0x2c, 0xb8, 0x4e, 0x7e, 0x02, 0x26, 0x92, 0xad, 0x4e, 0x71, 0x2f, 0x5c, 0x34, 0x4e, 0x4d, 0xcd, - 0x1b, 0x30, 0xf9, 0x97, 0x85, 0xd8, 0x3a, 0x79, 0x55, 0xfb, 0x55, 0x18, 0x59, 0x23, 0x51, 0xe0, - 0x56, 0x19, 0x81, 0xfb, 0x2d, 0xae, 0xbe, 0x0e, 0xee, 0x2f, 0xb2, 0xc5, 0x4a, 0x69, 0x86, 0xe8, - 0x3d, 0x80, 0x76, 0xe0, 0x53, 0xbd, 0x9c, 0x74, 0xe4, 0x64, 0x67, 0xa0, 0x6e, 0x6f, 0x2a, 0x9a, - 0xdc, 0x49, 0x15, 0xff, 0xc7, 0x1a, 0x3f, 0xfb, 0x1a, 0xe4, 0xd7, 0x3a, 0x11, 0xb9, 0x7b, 0x7f, - 0x51, 0x61, 0x7f, 0x0a, 0x46, 0x19, 0xea, 0x92, 0xdf, 0xa4, 0xc7, 0x13, 0xed, 0x69, 0x8b, 0xfe, - 0x4f, 
0x9a, 0x05, 0x19, 0x12, 0xe6, 0x65, 0x74, 0x07, 0x34, 0xfc, 0x66, 0x8d, 0x04, 0x62, 0x3c, - 0xd4, 0xfc, 0x2e, 0x31, 0x28, 0x16, 0xa5, 0xf6, 0x2f, 0xe6, 0x60, 0x84, 0x55, 0x14, 0xd2, 0xe3, - 0x00, 0x86, 0x1b, 0x9c, 0x8f, 0x18, 0x92, 0x0c, 0x62, 0x6a, 0xf4, 0xd6, 0x6b, 0xea, 0x2e, 0x07, - 0x60, 0xc9, 0x8f, 0xb2, 0xde, 0x77, 0xdc, 0x88, 0xb2, 0xce, 0x9d, 0x2e, 0xeb, 0x3b, 0x9c, 0x0d, - 0x96, 0xfc, 0xec, 0x5f, 0xcf, 0x01, 0xac, 0xfb, 0x35, 0x82, 0x49, 0xd8, 0x69, 0x46, 0xe8, 0xe7, - 0x20, 0xdf, 0x6e, 0x38, 0x61, 0xd2, 0xd4, 0x9f, 0xdf, 0xa4, 0xc0, 0x7b, 0x87, 0x53, 0x45, 0x8a, - 0xcb, 0xfe, 0x60, 0x8e, 0xa8, 0x87, 0xb3, 0xe6, 0x8e, 0x0f, 0x67, 0x45, 0x6d, 0x18, 0xf6, 0x3b, - 0x11, 0x55, 0xca, 0xc4, 0xa9, 0x96, 0x81, 0xa7, 0x6b, 0x83, 0x13, 0xe4, 0x31, 0xa0, 0xe2, 0x0f, - 0x96, 0x6c, 0xd0, 0xcb, 0x50, 0x68, 0x07, 0x7e, 0x9d, 0x1e, 0x52, 0xe2, 0x1c, 0x7b, 0x52, 0x1e, - 0xfc, 0x9b, 0x02, 0x7e, 0x4f, 0xfb, 0x8d, 0x15, 0xb6, 0xfd, 0xc3, 0x71, 0x3e, 0x2e, 0x62, 0x71, - 0x4c, 0x42, 0xce, 0x95, 0x37, 0x5c, 0x10, 0x24, 0x72, 0xcb, 0xf3, 0x38, 0xe7, 0xd6, 0xd4, 0x3a, - 0xce, 0xf5, 0x3c, 0xf2, 0x5e, 0x82, 0x91, 0x9a, 0x1b, 0xb6, 0x9b, 0xce, 0xc1, 0x7a, 0x8a, 0x79, - 0x61, 0x3e, 0x2e, 0xc2, 0x3a, 0x1e, 0x7a, 0x41, 0x04, 0x2f, 0x0f, 0x1a, 0x57, 0x4a, 0x19, 0xbc, - 0x5c, 0xa0, 0xcd, 0xd3, 0xe2, 0x96, 0x5f, 0x86, 0x51, 0x79, 0x88, 0x33, 0x2e, 0xfc, 0x3a, 0xa9, - 0xe2, 0x45, 0xb7, 0xb4, 0x32, 0x6c, 0x60, 0x76, 0xa9, 0x1c, 0x43, 0x67, 0xaf, 0x72, 0x7c, 0x1c, - 0xc6, 0xe4, 0x5f, 0xa6, 0x07, 0x94, 0x2e, 0xb2, 0xd6, 0x2b, 0xb3, 0xd7, 0x96, 0x5e, 0x88, 0x4d, - 0xdc, 0x78, 0xd1, 0x0e, 0xf7, 0xbb, 0x68, 0xaf, 0x03, 0x6c, 0xfb, 0x1d, 0xaf, 0xe6, 0x04, 0x07, - 0xcb, 0xf3, 0x22, 0xd4, 0x49, 0x69, 0x38, 0x65, 0x55, 0x82, 0x35, 0x2c, 0x7d, 0xa1, 0x17, 0xef, - 0xb3, 0xd0, 0x3f, 0x05, 0x45, 0x16, 0x16, 0x46, 0x6a, 0xb3, 0x91, 0x08, 0x02, 0x38, 0x49, 0x04, - 0x91, 0x52, 0x3b, 0x2a, 0x92, 0x08, 0x8e, 0xe9, 0xa1, 0x37, 0x01, 0x76, 0x5c, 0xcf, 0x0d, 0x1b, - 0x8c, 0xfa, 0xc8, 0x89, 0xa9, 0xab, 0x7e, 
0x2e, 0x28, 0x2a, 0x58, 0xa3, 0x88, 0xde, 0x80, 0xf3, - 0x24, 0x8c, 0xdc, 0x96, 0x13, 0x91, 0x9a, 0x7a, 0xcb, 0x51, 0x62, 0x36, 0x11, 0x15, 0x98, 0x77, - 0x33, 0x89, 0x70, 0x2f, 0x0d, 0x88, 0xbb, 0x09, 0x19, 0x3b, 0x72, 0xf2, 0x24, 0x3b, 0x12, 0xfd, - 0x99, 0x05, 0xe7, 0x03, 0xc2, 0x3d, 0xc3, 0xa1, 0x6a, 0xd8, 0x25, 0x26, 0x2f, 0xab, 0x59, 0x24, - 0xd5, 0x90, 0x9b, 0x7d, 0x1a, 0x27, 0xb9, 0x70, 0x45, 0x81, 0xc8, 0xde, 0x77, 0x95, 0xdf, 0x4b, - 0x03, 0x7e, 0xfe, 0xfb, 0x53, 0x53, 0xdd, 0x19, 0x5e, 0x14, 0x71, 0xba, 0xf3, 0xfe, 0xc6, 0xf7, - 0xa7, 0x26, 0xe4, 0xff, 0x78, 0xd0, 0xba, 0x3a, 0x49, 0xcf, 0xbd, 0xb6, 0x5f, 0x5b, 0xde, 0x14, - 0xd1, 0x1a, 0xea, 0xdc, 0xdb, 0xa4, 0x40, 0xcc, 0xcb, 0xd0, 0xf3, 0x50, 0xa8, 0x39, 0xa4, 0xe5, - 0x7b, 0xa4, 0x56, 0x1a, 0x8b, 0xdd, 0x61, 0xf3, 0x02, 0x86, 0x55, 0x29, 0x6a, 0xc2, 0x90, 0xcb, - 0xee, 0xc6, 0x22, 0x34, 0x2b, 0x83, 0x0b, 0x39, 0xbf, 0x6b, 0xcb, 0xc0, 0x2c, 0x26, 0x84, 0x05, - 0x0f, 0x5d, 0xea, 0x8f, 0x9f, 0x8d, 0xd4, 0x7f, 0x1e, 0x0a, 0xd5, 0x86, 0xdb, 0xac, 0x05, 0xc4, - 0x2b, 0x4d, 0xb0, 0x4b, 0x22, 0x1b, 0x89, 0x39, 0x01, 0xc3, 0xaa, 0x14, 0xfd, 0x25, 0x18, 0xf3, - 0x3b, 0x11, 0xdb, 0xe4, 0x74, 0xfe, 0xc3, 0xd2, 0x79, 0x86, 0xce, 0x1c, 0xed, 0x1b, 0x7a, 0x01, - 0x36, 0xf1, 0xa8, 0xb0, 0x6d, 0xf8, 0x61, 0x44, 0xff, 0x30, 0x61, 0x7b, 0xd9, 0x14, 0xb6, 0x4b, - 0x5a, 0x19, 0x36, 0x30, 0xd1, 0xd7, 0x2c, 0x38, 0xdf, 0x4a, 0x5e, 0x5d, 0x4a, 0x57, 0xd8, 0xc8, - 0x54, 0xb2, 0x50, 0x71, 0x13, 0xa4, 0x79, 0x3c, 0x62, 0x17, 0x18, 0x77, 0x37, 0x82, 0xbd, 0x47, - 0x0d, 0x0f, 0xbc, 0x6a, 0x23, 0xf0, 0x3d, 0xb3, 0x79, 0x8f, 0x67, 0xf5, 0x7e, 0x80, 0xed, 0xb2, - 0x34, 0x16, 0xe5, 0xc7, 0x8f, 0x0e, 0xa7, 0x2e, 0xa5, 0x16, 0xe1, 0xf4, 0x46, 0x4d, 0xce, 0xc3, - 0xe5, 0xf4, 0x9d, 0x7a, 0x3f, 0x5d, 0x7b, 0x40, 0xd7, 0xb5, 0x17, 0xe0, 0xf1, 0x9e, 0x8d, 0xa2, - 0x32, 0x5f, 0x2a, 0x66, 0x96, 0x29, 0xf3, 0xbb, 0x14, 0xa9, 0x73, 0x30, 0xaa, 0xe7, 0xe5, 0x61, - 0x51, 0x0f, 0xda, 0x5b, 0x68, 0xf4, 0x1e, 0x14, 0xfd, 0x4a, 0xe6, 0xe1, 0x03, 
0x1b, 0x95, 0xae, - 0xf0, 0x01, 0x05, 0xc2, 0x31, 0xc3, 0x7e, 0xa2, 0x1e, 0x52, 0x1f, 0x6e, 0x3f, 0xe2, 0x66, 0x9f, - 0x38, 0xea, 0xe1, 0x3f, 0x0c, 0x42, 0x4c, 0x09, 0xbd, 0x00, 0x05, 0xe2, 0xd5, 0xda, 0xbe, 0xeb, - 0x45, 0x49, 0xbb, 0xcf, 0x4d, 0x01, 0xc7, 0x0a, 0x43, 0x8b, 0x91, 0xc8, 0x1d, 0x1b, 0x23, 0x51, - 0x83, 0x71, 0x87, 0x99, 0xf0, 0x63, 0x0f, 0xf7, 0xc0, 0x89, 0x5d, 0x52, 0xb3, 0x26, 0x05, 0x9c, - 0x24, 0x49, 0xb9, 0x84, 0x71, 0x55, 0xc6, 0x65, 0xf0, 0xc4, 0x5c, 0x2a, 0x26, 0x05, 0x9c, 0x24, - 0x89, 0xde, 0x80, 0x52, 0x95, 0x3d, 0x84, 0xe1, 0x7d, 0x5c, 0xde, 0x59, 0xf7, 0xa3, 0xcd, 0x80, - 0x84, 0xc4, 0xe3, 0x11, 0x08, 0x85, 0xf2, 0xd3, 0x62, 0x14, 0x4a, 0x73, 0x3d, 0xf0, 0x70, 0x4f, - 0x0a, 0x54, 0xab, 0x63, 0xfe, 0x07, 0x37, 0x3a, 0xd8, 0xf2, 0x77, 0x89, 0x74, 0x8e, 0x28, 0xad, - 0xae, 0xa2, 0x17, 0x62, 0x13, 0x17, 0xfd, 0x8a, 0x05, 0x63, 0x4d, 0x69, 0xc6, 0xc3, 0x9d, 0xa6, - 0xcc, 0xfc, 0x83, 0x33, 0x59, 0x7e, 0xab, 0x3a, 0x65, 0x2e, 0xf0, 0x0d, 0x10, 0x36, 0x79, 0xdb, - 0xdf, 0xb5, 0x60, 0x22, 0x59, 0x0d, 0xed, 0xc2, 0x53, 0x2d, 0x27, 0xd8, 0x5d, 0xf6, 0x76, 0x02, - 0x16, 0x22, 0x1a, 0xf1, 0x59, 0x9d, 0xdd, 0x89, 0x48, 0x30, 0xef, 0x1c, 0xf0, 0x40, 0xb0, 0xbc, - 0x4a, 0x56, 0xf6, 0xd4, 0xda, 0x71, 0xc8, 0xf8, 0x78, 0x5a, 0xa8, 0x02, 0x97, 0x28, 0xc2, 0x3c, - 0x69, 0x12, 0x2a, 0xa1, 0x62, 0x26, 0x39, 0xc6, 0x44, 0x85, 0x3a, 0xac, 0xa5, 0x21, 0xe1, 0xf4, - 0xba, 0x76, 0x01, 0x86, 0x78, 0x78, 0xbc, 0xfd, 0xef, 0x73, 0x20, 0x4f, 0xd2, 0x3f, 0xdf, 0x26, - 0x73, 0x64, 0xc3, 0x50, 0xc0, 0x6e, 0xc3, 0xe2, 0xa2, 0xc6, 0x94, 0x1a, 0x7e, 0x3f, 0xc6, 0xa2, - 0x84, 0xaa, 0x18, 0xe4, 0xae, 0x1b, 0xcd, 0xf9, 0x35, 0x79, 0x3d, 0x63, 0x2a, 0xc6, 0x4d, 0x01, - 0xc3, 0xaa, 0xd4, 0xfe, 0x25, 0x0b, 0xc6, 0x68, 0x2f, 0x9b, 0x4d, 0xd2, 0xac, 0x44, 0xa4, 0x1d, - 0xa2, 0x10, 0xf2, 0x21, 0xfd, 0x91, 0x9d, 0x99, 0x21, 0x7e, 0x15, 0x41, 0xda, 0x9a, 0x41, 0x95, - 0x32, 0xc1, 0x9c, 0x97, 0xfd, 0xad, 0x01, 0x28, 0xaa, 0xc1, 0xee, 0xc3, 0x4a, 0x7b, 0x3d, 0xce, - 0xdd, 0xc0, 0xa5, 
0x61, 0x49, 0xcb, 0xdb, 0x40, 0xef, 0x54, 0xb3, 0xde, 0x01, 0x7f, 0x7e, 0x19, - 0x27, 0x71, 0x78, 0xc1, 0x74, 0x07, 0x5d, 0xd6, 0x7d, 0x0c, 0x1a, 0xbe, 0xf0, 0x0b, 0xdd, 0xd5, - 0xbd, 0x71, 0x83, 0x59, 0x9d, 0x2c, 0xca, 0xd5, 0xd0, 0xdb, 0x0d, 0x97, 0x48, 0x40, 0x96, 0xef, - 0x2b, 0x01, 0xd9, 0x35, 0x18, 0x24, 0x5e, 0xa7, 0xc5, 0x42, 0xe4, 0x8b, 0x4c, 0xa7, 0x1a, 0xbc, - 0xe9, 0x75, 0x5a, 0x66, 0xcf, 0x18, 0x0a, 0xfa, 0x04, 0x8c, 0xd4, 0x48, 0x58, 0x0d, 0x5c, 0xf6, - 0xa6, 0x50, 0x5c, 0x4a, 0x9f, 0x64, 0x37, 0xfd, 0x18, 0x6c, 0x56, 0xd4, 0x2b, 0xd8, 0xef, 0xc0, - 0xd0, 0x66, 0xb3, 0x53, 0x77, 0x3d, 0xd4, 0x86, 0x21, 0xfe, 0xc2, 0x50, 0x9c, 0xbc, 0x19, 0x28, - 0xea, 0x7c, 0xb7, 0x6b, 0x91, 0xe1, 0xfc, 0x71, 0x8c, 0xe0, 0x63, 0xff, 0x33, 0x0b, 0xe8, 0xad, - 0x62, 0x71, 0x0e, 0xfd, 0x15, 0x28, 0x84, 0xf2, 0xf9, 0x28, 0x5f, 0x26, 0x3f, 0xa5, 0x22, 0x48, - 0x05, 0xfc, 0xde, 0xe1, 0xd4, 0x18, 0x43, 0x56, 0x2f, 0x3e, 0x55, 0x15, 0xd4, 0x84, 0x31, 0x66, - 0x47, 0x95, 0xe7, 0x91, 0xb0, 0x7c, 0xdf, 0xe8, 0xf3, 0x51, 0x9e, 0x5e, 0x55, 0x48, 0x67, 0x1d, - 0x84, 0x4d, 0xe2, 0xf6, 0x1f, 0x0c, 0x82, 0x66, 0x6e, 0xec, 0x63, 0x79, 0xbf, 0x9d, 0x30, 0x2e, - 0xaf, 0x65, 0x62, 0x5c, 0x96, 0x16, 0x5b, 0x2e, 0x32, 0x4c, 0x7b, 0x32, 0x6d, 0x54, 0x83, 0x34, - 0xdb, 0x62, 0x73, 0xa8, 0x46, 0x2d, 0x91, 0x66, 0x1b, 0xb3, 0x12, 0xf5, 0xbc, 0x60, 0xb0, 0xe7, - 0xf3, 0x82, 0x06, 0xe4, 0xeb, 0x4e, 0xa7, 0x4e, 0x44, 0xd4, 0x48, 0x06, 0x7e, 0x04, 0x16, 0x6f, - 0xc9, 0xfd, 0x08, 0xec, 0x27, 0xe6, 0x0c, 0xe8, 0xee, 0x6c, 0x48, 0x3f, 0xaf, 0x30, 0x08, 0x65, - 0xb0, 0x3b, 0x95, 0xeb, 0x98, 0xef, 0x4e, 0xf5, 0x17, 0xc7, 0xcc, 0xe8, 0x7d, 0xb1, 0xca, 0xdf, - 0xf2, 0x8a, 0x03, 0x7f, 0x39, 0x8b, 0xf7, 0x13, 0x8c, 0x20, 0xbf, 0x2f, 0x8a, 0x3f, 0x58, 0xb2, - 0xb1, 0x67, 0x60, 0x44, 0x4b, 0x23, 0x46, 0xa7, 0x41, 0x3d, 0x23, 0xd5, 0xa6, 0x61, 0xde, 0x89, - 0x1c, 0xcc, 0x4a, 0xec, 0xbf, 0x37, 0x00, 0xea, 0xde, 0xae, 0x47, 0xfb, 0x3b, 0x55, 0xed, 0xd1, - 0xbb, 0xf1, 0xcc, 0xcc, 0xf7, 0xb0, 0x28, 0xa5, 0x4a, 
0x51, 0x8b, 0x04, 0x75, 0x75, 0x53, 0x10, - 0xf2, 0x55, 0x29, 0x45, 0x6b, 0x7a, 0x21, 0x36, 0x71, 0xa9, 0x46, 0xdb, 0x72, 0x3c, 0x77, 0x87, - 0x84, 0x51, 0x32, 0x68, 0x6b, 0x4d, 0xc0, 0xb1, 0xc2, 0x40, 0x8b, 0x70, 0x3e, 0x24, 0xd1, 0xc6, - 0xbe, 0x47, 0x02, 0xf5, 0xfc, 0x4d, 0xbc, 0x87, 0x54, 0x81, 0x8c, 0x95, 0x24, 0x02, 0xee, 0xae, - 0x93, 0x1a, 0xe8, 0x92, 0x3f, 0x71, 0xa0, 0xcb, 0x3c, 0x4c, 0xec, 0x38, 0x6e, 0xb3, 0x13, 0x90, - 0x9e, 0xe1, 0x32, 0x0b, 0x89, 0x72, 0xdc, 0x55, 0x83, 0xc5, 0xd2, 0x36, 0x9d, 0x7a, 0x58, 0x1a, - 0xd6, 0x62, 0x69, 0x29, 0x00, 0x73, 0xb8, 0xfd, 0x8f, 0x2d, 0xe0, 0x0f, 0xd1, 0x67, 0x77, 0x76, - 0x5c, 0xcf, 0x8d, 0x0e, 0xd0, 0x6f, 0x5a, 0x30, 0xe1, 0xf9, 0x35, 0x32, 0xeb, 0x45, 0xae, 0x04, - 0x66, 0x97, 0x63, 0x89, 0xf1, 0x5a, 0x4f, 0x90, 0xe7, 0xaf, 0x1a, 0x93, 0x50, 0xdc, 0xd5, 0x0c, - 0xfb, 0x0a, 0x5c, 0x4a, 0x25, 0x60, 0x7f, 0x77, 0x00, 0xcc, 0xf7, 0xf4, 0xe8, 0x55, 0xc8, 0x37, - 0xd9, 0x0b, 0x4f, 0xeb, 0x01, 0x13, 0x25, 0xb0, 0xb1, 0xe2, 0x4f, 0x40, 0x39, 0x25, 0x34, 0x0f, - 0x23, 0xec, 0x91, 0xbe, 0x78, 0x7f, 0xcb, 0x97, 0xa2, 0x1d, 0x27, 0xa1, 0x54, 0x45, 0xf7, 0xcc, - 0xbf, 0x58, 0xaf, 0x86, 0xde, 0x85, 0xe1, 0x6d, 0x9e, 0x87, 0x26, 0x3b, 0xc3, 0xbe, 0x48, 0x6c, - 0xc3, 0xb4, 0x08, 0x99, 0xe5, 0xe6, 0x5e, 0xfc, 0x13, 0x4b, 0x8e, 0xe8, 0x00, 0x0a, 0x8e, 0x9c, - 0xd3, 0xc1, 0xac, 0xa2, 0x2f, 0x8d, 0xf5, 0xc3, 0x75, 0x3b, 0x35, 0x87, 0x8a, 0x5d, 0xc2, 0x51, - 0x9e, 0xef, 0xcb, 0x51, 0xfe, 0x4d, 0x0b, 0x20, 0xce, 0x50, 0x87, 0xee, 0x42, 0x21, 0xbc, 0x61, - 0x5c, 0xaf, 0xb3, 0x78, 0xd0, 0x26, 0x28, 0x6a, 0x8f, 0x3e, 0x04, 0x04, 0x2b, 0x6e, 0xf7, 0x33, - 0x09, 0xfc, 0xa9, 0x05, 0x17, 0xd3, 0x32, 0xe9, 0x3d, 0xc2, 0x16, 0x9f, 0xd4, 0x1a, 0x20, 0x2a, - 0x6c, 0x06, 0x64, 0xc7, 0xbd, 0x9b, 0x74, 0xe9, 0xaf, 0xc8, 0x02, 0x1c, 0xe3, 0xd8, 0xdf, 0x1e, - 0x02, 0xc5, 0xf8, 0x94, 0xac, 0x07, 0xcf, 0xd1, 0xdb, 0x45, 0x3d, 0xce, 0x8f, 0xa4, 0xf0, 0x30, - 0x83, 0x62, 0x51, 0x4a, 0x6f, 0x18, 0x32, 0x7a, 0x4f, 0x88, 0x6c, 0xb6, 0x0a, 0x65, 0x94, 
0x1f, - 0x56, 0xa5, 0x69, 0xf6, 0x88, 0xfc, 0x99, 0xd8, 0x23, 0x86, 0xb2, 0xb7, 0x47, 0x5c, 0x83, 0xe1, - 0xc0, 0x6f, 0x92, 0x59, 0xbc, 0x2e, 0xf4, 0xe6, 0x38, 0xaf, 0x17, 0x07, 0x63, 0x59, 0x8e, 0x5e, - 0x82, 0x91, 0x4e, 0x48, 0x2a, 0xf3, 0x2b, 0x73, 0x01, 0xa9, 0x85, 0x22, 0xe0, 0x5f, 0x39, 0xd5, - 0x6e, 0xc5, 0x45, 0x58, 0xc7, 0x43, 0xdf, 0xb6, 0x8e, 0x31, 0x79, 0x14, 0xb3, 0x3a, 0x13, 0x52, - 0xb3, 0x8b, 0xb0, 0x4b, 0xc0, 0x83, 0xd8, 0x51, 0xbe, 0x6e, 0xc1, 0x79, 0xe2, 0x55, 0x83, 0x03, - 0x46, 0x47, 0x50, 0x13, 0x8e, 0xa5, 0x5b, 0x59, 0x6c, 0xbe, 0x9b, 0x49, 0xe2, 0xdc, 0x6a, 0xdc, - 0x05, 0xc6, 0xdd, 0xcd, 0xb0, 0x7f, 0x98, 0x83, 0x0b, 0x29, 0x14, 0x58, 0x70, 0x74, 0x8b, 0x2e, - 0xa0, 0xe5, 0x5a, 0x72, 0xfb, 0xac, 0x08, 0x38, 0x56, 0x18, 0x68, 0x13, 0x2e, 0xee, 0xb6, 0xc2, - 0x98, 0xca, 0x9c, 0xef, 0x45, 0xe4, 0xae, 0xdc, 0x4c, 0xd2, 0x47, 0x74, 0x71, 0x25, 0x05, 0x07, - 0xa7, 0xd6, 0xa4, 0xda, 0x06, 0xf1, 0x9c, 0xed, 0x26, 0x89, 0x8b, 0x44, 0x68, 0xbf, 0xd2, 0x36, - 0x6e, 0x26, 0xca, 0x71, 0x57, 0x0d, 0xf4, 0x25, 0x0b, 0x9e, 0x08, 0x49, 0xb0, 0x47, 0x82, 0x8a, - 0x5b, 0x23, 0x73, 0x9d, 0x30, 0xf2, 0x5b, 0x24, 0x78, 0x40, 0x9b, 0xdc, 0xd4, 0xd1, 0xe1, 0xd4, - 0x13, 0x95, 0xde, 0xd4, 0xf0, 0x71, 0xac, 0xec, 0x2f, 0x59, 0x70, 0xae, 0xc2, 0x6e, 0x89, 0x4a, - 0xe7, 0xcc, 0x3a, 0x1d, 0xd4, 0x73, 0xea, 0x99, 0x67, 0x42, 0x88, 0x99, 0x0f, 0x33, 0xed, 0xb7, - 0x60, 0xa2, 0x42, 0x5a, 0x4e, 0xbb, 0xc1, 0x5e, 0xcd, 0xf0, 0x20, 0x86, 0x19, 0x28, 0x86, 0x12, - 0x96, 0xcc, 0xa3, 0xa9, 0x90, 0x71, 0x8c, 0x83, 0x9e, 0xe5, 0x01, 0x17, 0x32, 0xb2, 0xb8, 0xc8, - 0xb5, 0x73, 0x1e, 0xa5, 0x11, 0x62, 0x59, 0x66, 0xef, 0xc3, 0x68, 0x5c, 0x9d, 0xec, 0xa0, 0x3a, - 0x8c, 0x57, 0xb5, 0xc0, 0xf8, 0x38, 0x66, 0xb6, 0xff, 0x18, 0x7a, 0x26, 0x8b, 0xe6, 0x4c, 0x22, - 0x38, 0x49, 0xd5, 0xfe, 0x4a, 0x0e, 0xc6, 0x15, 0x67, 0xe1, 0x10, 0x78, 0x3f, 0x19, 0x24, 0x82, - 0xb3, 0x78, 0x7e, 0x6e, 0x8e, 0xe4, 0x31, 0x81, 0x22, 0xef, 0x27, 0x03, 0x45, 0x4e, 0x95, 0x7d, - 0x97, 0x8f, 0xe3, 0x9b, 0x39, 
0x28, 0xa8, 0xc7, 0xf0, 0xaf, 0x42, 0x9e, 0x5d, 0xa0, 0x1e, 0x4e, - 0x1b, 0x65, 0x97, 0x31, 0xcc, 0x29, 0x51, 0x92, 0xcc, 0xcf, 0xfd, 0xc0, 0x99, 0xc0, 0x8a, 0xdc, - 0xee, 0xe5, 0x04, 0x11, 0xe6, 0x94, 0xd0, 0x0a, 0x0c, 0x10, 0xaf, 0x26, 0xd4, 0xd2, 0x93, 0x13, - 0x64, 0x39, 0x64, 0x6f, 0x7a, 0x35, 0x4c, 0xa9, 0xb0, 0x74, 0x54, 0x5c, 0xfb, 0x18, 0x34, 0xb7, - 0x87, 0x50, 0x3d, 0x44, 0xa9, 0xfd, 0x2b, 0x03, 0x30, 0x54, 0xe9, 0x6c, 0x53, 0x05, 0xfb, 0xb7, - 0x2d, 0xb8, 0xb0, 0x9f, 0xc8, 0x5c, 0x17, 0x2f, 0xd9, 0x5b, 0xd9, 0xd9, 0xfe, 0xf4, 0x58, 0x8b, - 0x27, 0x44, 0xbb, 0x2e, 0xa4, 0x14, 0xe2, 0xb4, 0xe6, 0x18, 0x99, 0xaa, 0x06, 0x4e, 0x25, 0x53, - 0xd5, 0xdd, 0x53, 0x8e, 0xaa, 0x1d, 0xeb, 0x15, 0x51, 0x6b, 0xff, 0x41, 0x1e, 0x80, 0xcf, 0xc6, - 0x46, 0x3b, 0xea, 0xc7, 0x38, 0xf4, 0x32, 0x8c, 0xca, 0x0f, 0x91, 0xac, 0xc7, 0x81, 0x3d, 0xca, - 0xb9, 0xbb, 0xa8, 0x95, 0x61, 0x03, 0x93, 0x5d, 0x08, 0xbc, 0x28, 0x38, 0xe0, 0x4a, 0x63, 0x32, - 0x72, 0x56, 0x95, 0x60, 0x0d, 0x0b, 0x4d, 0x1b, 0xc6, 0x76, 0x9e, 0xb5, 0xe3, 0xdc, 0x31, 0xb6, - 0xf1, 0x4f, 0xc0, 0x39, 0xf3, 0xfd, 0xac, 0xd0, 0x94, 0xd4, 0xeb, 0x75, 0xf3, 0xd9, 0x2d, 0x4e, - 0x60, 0xd3, 0x45, 0x5c, 0x0b, 0x0e, 0x70, 0xc7, 0x13, 0x2a, 0x93, 0x5a, 0xc4, 0xf3, 0x0c, 0x8a, - 0x45, 0x29, 0x1d, 0x05, 0x7e, 0x1a, 0x71, 0xb8, 0x78, 0x00, 0xa9, 0x46, 0xa1, 0xa2, 0x95, 0x61, - 0x03, 0x93, 0x72, 0x10, 0xc6, 0x35, 0x30, 0xb7, 0x49, 0xc2, 0x22, 0xd6, 0x86, 0x73, 0xbe, 0x69, - 0x9b, 0xe0, 0xd1, 0x2c, 0x1f, 0xed, 0x73, 0xe9, 0x19, 0x75, 0xf9, 0x83, 0x9d, 0x84, 0x29, 0x23, - 0x41, 0x9f, 0xea, 0x8c, 0x7a, 0x9c, 0xeb, 0xa8, 0x19, 0x88, 0xd5, 0x33, 0x14, 0x75, 0x13, 0x2e, - 0xb6, 0xfd, 0xda, 0x66, 0xe0, 0xfa, 0x81, 0x1b, 0x1d, 0xcc, 0x35, 0x9d, 0x30, 0x64, 0x0b, 0x63, - 0xcc, 0x54, 0x4e, 0x36, 0x53, 0x70, 0x70, 0x6a, 0x4d, 0xaa, 0xdd, 0xb7, 0x05, 0x90, 0x05, 0x61, - 0xe4, 0xb9, 0x76, 0x2f, 0x11, 0xb1, 0x2a, 0xb5, 0x2f, 0xc0, 0xf9, 0x4a, 0xa7, 0xdd, 0x6e, 0xba, - 0xa4, 0xa6, 0x8c, 0xd9, 0xf6, 0xcf, 0xc3, 0xb8, 0xc8, 0x63, 0xa5, 
0x54, 0x81, 0x13, 0x65, 0x5d, - 0xb4, 0xff, 0xcc, 0x82, 0xf1, 0x84, 0xcb, 0x1b, 0xbd, 0x9b, 0x3c, 0xc0, 0x33, 0xf1, 0x4d, 0xe8, - 0x67, 0x37, 0xdf, 0xa4, 0xa9, 0xca, 0x40, 0x43, 0x86, 0x76, 0x66, 0x16, 0x21, 0xcd, 0x02, 0x20, - 0xf9, 0x89, 0xa0, 0xc7, 0x87, 0xda, 0x5f, 0xcc, 0x41, 0x7a, 0x9c, 0x01, 0xfa, 0x4c, 0xf7, 0x00, - 0xbc, 0x9a, 0xe1, 0x00, 0x88, 0x40, 0x87, 0xde, 0x63, 0xe0, 0x99, 0x63, 0xb0, 0x96, 0xd1, 0x18, - 0x08, 0xbe, 0xdd, 0x23, 0xf1, 0xbf, 0x2c, 0x18, 0xd9, 0xda, 0x5a, 0x55, 0xf6, 0x25, 0x0c, 0x97, - 0x43, 0xfe, 0xb2, 0x8d, 0x39, 0x08, 0xe7, 0xfc, 0x56, 0x9b, 0xfb, 0x0b, 0x85, 0x1f, 0x93, 0xa5, - 0x14, 0xab, 0xa4, 0x62, 0xe0, 0x1e, 0x35, 0xd1, 0x32, 0x5c, 0xd0, 0x4b, 0x84, 0x95, 0x50, 0xf8, - 0x2c, 0xf9, 0x5b, 0xef, 0xee, 0x62, 0x9c, 0x56, 0x27, 0x49, 0x4a, 0x98, 0x0a, 0xc5, 0x17, 0x72, - 0xba, 0x48, 0x89, 0x62, 0x9c, 0x56, 0xc7, 0xde, 0x80, 0x11, 0xed, 0x7b, 0x4d, 0xe8, 0x93, 0x30, - 0x51, 0xf5, 0x5b, 0xd2, 0x44, 0xb3, 0x4a, 0xf6, 0x48, 0x53, 0x74, 0x99, 0x59, 0xf1, 0xe6, 0x12, - 0x65, 0xb8, 0x0b, 0xdb, 0xfe, 0xef, 0x57, 0x41, 0x3d, 0x25, 0xe9, 0xe3, 0x84, 0x69, 0xab, 0x08, - 0xac, 0x7c, 0xc6, 0x11, 0x58, 0x4a, 0xd6, 0x26, 0xa2, 0xb0, 0xa2, 0x38, 0x0a, 0x6b, 0x28, 0xeb, - 0x28, 0x2c, 0xa5, 0x30, 0x76, 0x45, 0x62, 0x7d, 0xd5, 0x82, 0x51, 0xcf, 0xaf, 0x11, 0xe5, 0x05, - 0x1a, 0x66, 0x5a, 0xeb, 0x1b, 0xd9, 0x85, 0x96, 0xf2, 0x88, 0x22, 0x41, 0x9e, 0xc7, 0xe9, 0xa9, - 0x23, 0x4a, 0x2f, 0xc2, 0x46, 0x3b, 0xd0, 0x82, 0x66, 0x34, 0xe4, 0x79, 0xa4, 0x9e, 0x4c, 0xbb, - 0x3d, 0xdc, 0xd7, 0x02, 0x78, 0x57, 0xd3, 0x9b, 0x8a, 0x59, 0x19, 0xc3, 0xe4, 0x33, 0x05, 0xcd, - 0xb6, 0x2f, 0xb3, 0xe2, 0xc5, 0xfa, 0x94, 0x0d, 0x43, 0x3c, 0xa0, 0x4f, 0x7c, 0x39, 0x88, 0xb9, - 0x9c, 0x78, 0xb0, 0x1f, 0x16, 0x25, 0x28, 0x92, 0x9e, 0xe6, 0x91, 0xac, 0x72, 0xdc, 0x1a, 0x9e, - 0xec, 0x74, 0x57, 0x33, 0x7a, 0x45, 0xbf, 0x94, 0x8e, 0xf6, 0x73, 0x29, 0x1d, 0xeb, 0x79, 0x21, - 0xfd, 0xb2, 0x05, 0xa3, 0x55, 0x2d, 0xe7, 0x6c, 0xe9, 0xf9, 0xac, 0xbe, 0x12, 0x92, 0x96, 0x1a, - 0x98, 
0xbf, 0xa7, 0x34, 0x72, 0xdc, 0x1a, 0xdc, 0x59, 0x1a, 0x24, 0x76, 0x03, 0x67, 0x47, 0xff, - 0xc8, 0xf5, 0xcd, 0x0c, 0x8e, 0x07, 0xe3, 0x46, 0xcf, 0xa7, 0x91, 0xc3, 0xb0, 0xe0, 0x85, 0xde, - 0x83, 0x82, 0x8c, 0x09, 0x15, 0x11, 0x9b, 0x38, 0x0b, 0x0b, 0xb7, 0xe9, 0xbf, 0x92, 0xc9, 0x53, - 0x38, 0x14, 0x2b, 0x8e, 0xa8, 0x01, 0x03, 0x35, 0xa7, 0x2e, 0x62, 0x37, 0xd7, 0xb2, 0xc9, 0x4d, - 0x25, 0x79, 0xb2, 0xeb, 0xd5, 0xfc, 0xec, 0x22, 0xa6, 0x2c, 0xd0, 0xdd, 0x38, 0x69, 0xe7, 0x44, - 0x66, 0xa7, 0xaf, 0xa9, 0x26, 0x71, 0x1b, 0x43, 0x57, 0x0e, 0xd0, 0x9a, 0x70, 0xf9, 0xfd, 0x05, - 0xc6, 0x76, 0x21, 0x9b, 0xe4, 0x56, 0xfc, 0x53, 0x2b, 0xb1, 0xdb, 0x90, 0x72, 0x61, 0x9f, 0x98, - 0xfa, 0x99, 0xac, 0xb8, 0x2c, 0x6d, 0x6d, 0x6d, 0x76, 0x7d, 0x5a, 0xaa, 0x09, 0x43, 0x6d, 0x16, - 0x3e, 0x50, 0xfa, 0xd9, 0xac, 0xce, 0x16, 0x1e, 0x8e, 0xc0, 0xd7, 0x26, 0xff, 0x8d, 0x05, 0x0f, - 0x74, 0x13, 0x86, 0x79, 0xee, 0x69, 0x1e, 0x3b, 0x3b, 0x72, 0x7d, 0xb2, 0x77, 0x06, 0xeb, 0xf8, - 0xa0, 0xe0, 0xff, 0x43, 0x2c, 0xeb, 0xa2, 0xaf, 0x58, 0x70, 0x8e, 0x4a, 0xd4, 0x38, 0x59, 0x76, - 0x09, 0x65, 0x25, 0xb3, 0x6e, 0x85, 0x54, 0x23, 0x91, 0xb2, 0x46, 0x5d, 0x93, 0x96, 0x0d, 0x76, - 0x38, 0xc1, 0x1e, 0xbd, 0x0f, 0x85, 0xd0, 0xad, 0x91, 0xaa, 0x13, 0x84, 0xa5, 0x0b, 0xa7, 0xd3, - 0x94, 0xd8, 0xd7, 0x21, 0x18, 0x61, 0xc5, 0x12, 0xfd, 0x6d, 0xf6, 0xdd, 0x0d, 0xf1, 0x8d, 0x24, - 0xf1, 0xf9, 0xbe, 0x8b, 0xa7, 0xf6, 0xf9, 0x3e, 0xee, 0x02, 0x30, 0xd9, 0xe1, 0x24, 0x7f, 0xf4, - 0xd7, 0x2d, 0xb8, 0xc4, 0x73, 0xa5, 0x26, 0x13, 0xe5, 0x5e, 0x7a, 0x40, 0xf3, 0x0a, 0x0b, 0xfa, - 0x9d, 0x4d, 0x23, 0x89, 0xd3, 0x39, 0xb1, 0x64, 0x6b, 0x66, 0x6e, 0xf3, 0xcb, 0x99, 0xfa, 0xfc, - 0xfa, 0xcf, 0x67, 0x8e, 0x5e, 0x84, 0x91, 0xb6, 0x38, 0x0e, 0xdd, 0xb0, 0xc5, 0x42, 0xb8, 0x07, - 0xf8, 0x33, 0x97, 0xcd, 0x18, 0x8c, 0x75, 0x1c, 0x23, 0xf3, 0xde, 0xb5, 0xe3, 0x32, 0xef, 0xa1, - 0x5b, 0x30, 0x12, 0xf9, 0x4d, 0x12, 0x88, 0x9b, 0x6a, 0x89, 0xad, 0xc0, 0xab, 0x69, 0x7b, 0x6b, - 0x4b, 0xa1, 0xc5, 0x37, 0xd9, 0x18, 0x16, 
0x62, 0x9d, 0x0e, 0x8b, 0xc8, 0x14, 0x39, 0x68, 0x03, - 0x76, 0x85, 0x7d, 0x3c, 0x11, 0x91, 0xa9, 0x17, 0x62, 0x13, 0x17, 0x2d, 0xc2, 0xf9, 0x76, 0xd7, - 0x1d, 0x98, 0x3f, 0xe2, 0x50, 0xe1, 0x04, 0xdd, 0x17, 0xe0, 0xee, 0x3a, 0xc6, 0xed, 0xf7, 0x89, - 0xe3, 0x6e, 0xbf, 0x3d, 0xf2, 0xd0, 0x3d, 0xf9, 0x20, 0x79, 0xe8, 0x50, 0x0d, 0x9e, 0x74, 0x3a, - 0x91, 0xcf, 0x52, 0x07, 0x98, 0x55, 0x78, 0x70, 0xea, 0xd3, 0x3c, 0xde, 0xf5, 0xe8, 0x70, 0xea, - 0xc9, 0xd9, 0x63, 0xf0, 0xf0, 0xb1, 0x54, 0xd0, 0x3b, 0x50, 0x20, 0x22, 0x97, 0x5e, 0xe9, 0xa7, - 0xb2, 0x52, 0x12, 0xcc, 0xec, 0x7c, 0x32, 0xd6, 0x90, 0xc3, 0xb0, 0xe2, 0x87, 0xb6, 0x60, 0xa4, - 0xe1, 0x87, 0xd1, 0x6c, 0xd3, 0x75, 0x42, 0x12, 0x96, 0x9e, 0x62, 0x8b, 0x26, 0x55, 0xf7, 0x5a, - 0x92, 0x68, 0xf1, 0x9a, 0x59, 0x8a, 0x6b, 0x62, 0x9d, 0x0c, 0x22, 0xcc, 0xf3, 0xc7, 0x22, 0x73, - 0xa5, 0x57, 0xe6, 0x2a, 0xeb, 0xd8, 0x73, 0x69, 0x94, 0x37, 0xfd, 0x5a, 0xc5, 0xc4, 0x56, 0xae, - 0x3f, 0x1d, 0x88, 0x93, 0x34, 0xd1, 0xcb, 0x30, 0xda, 0xf6, 0x6b, 0x95, 0x36, 0xa9, 0x6e, 0x3a, - 0x51, 0xb5, 0x51, 0x9a, 0x32, 0xad, 0x6e, 0x9b, 0x5a, 0x19, 0x36, 0x30, 0x51, 0x1b, 0x86, 0x5b, - 0xfc, 0x69, 0x6b, 0xe9, 0x99, 0xac, 0xee, 0x36, 0xe2, 0xad, 0x2c, 0xd7, 0x17, 0xc4, 0x1f, 0x2c, - 0xd9, 0xa0, 0x7f, 0x68, 0xc1, 0x78, 0xe2, 0x51, 0x42, 0xe9, 0xa7, 0x33, 0x53, 0x59, 0x4c, 0xc2, - 0xe5, 0xe7, 0xd8, 0xf0, 0x99, 0xc0, 0x7b, 0xdd, 0x20, 0x9c, 0x6c, 0x11, 0x1f, 0x17, 0xf6, 0x3e, - 0xbd, 0xf4, 0x6c, 0x76, 0xe3, 0xc2, 0x08, 0xca, 0x71, 0x61, 0x7f, 0xb0, 0x64, 0x83, 0xae, 0xc1, - 0xb0, 0x48, 0x91, 0x53, 0x7a, 0xce, 0x74, 0xdf, 0x8a, 0x4c, 0x3a, 0x58, 0x96, 0x4f, 0xfe, 0x3c, - 0x9c, 0xef, 0xba, 0xba, 0x9d, 0xe8, 0x91, 0xf4, 0x6f, 0x58, 0xa0, 0xbf, 0x27, 0xcc, 0x3c, 0x81, - 0xf5, 0xcb, 0x30, 0x5a, 0xe5, 0x5f, 0x9e, 0xe1, 0x2f, 0x12, 0x07, 0x4d, 0xfb, 0xe7, 0x9c, 0x56, - 0x86, 0x0d, 0x4c, 0x7b, 0x09, 0x50, 0x77, 0x76, 0xd1, 0x07, 0x4a, 0x7d, 0xf1, 0x3b, 0x16, 0x8c, - 0x19, 0x3a, 0x43, 0xe6, 0x1e, 0xbf, 0x05, 0x40, 0x2d, 0x37, 0x08, 0xfc, 0x40, 
0xff, 0x9e, 0x88, - 0x48, 0xa7, 0xc8, 0x52, 0x4d, 0xad, 0x75, 0x95, 0xe2, 0x94, 0x1a, 0xf6, 0x3f, 0x1d, 0x84, 0x38, - 0xd8, 0x56, 0x25, 0x99, 0xb3, 0x7a, 0x26, 0x99, 0x7b, 0x01, 0x0a, 0x6f, 0x85, 0xbe, 0xb7, 0x19, - 0xa7, 0xa2, 0x53, 0x73, 0xf1, 0x4a, 0x65, 0x63, 0x9d, 0x61, 0x2a, 0x0c, 0x86, 0xfd, 0xf6, 0x82, - 0xdb, 0x8c, 0xba, 0x73, 0x95, 0xbd, 0xf2, 0x2a, 0x87, 0x63, 0x85, 0xc1, 0x3e, 0x2d, 0xb2, 0x47, - 0x94, 0x61, 0x3c, 0xfe, 0xb4, 0x08, 0x4f, 0x1c, 0xcc, 0xca, 0xd0, 0x0c, 0x14, 0x95, 0x51, 0x5d, - 0x58, 0xea, 0xd5, 0x48, 0x29, 0xcb, 0x3b, 0x8e, 0x71, 0x98, 0x42, 0x28, 0x0c, 0xb1, 0xc2, 0x84, - 0x52, 0xc9, 0xe2, 0x7a, 0x92, 0x30, 0xed, 0x72, 0xd9, 0x2e, 0xc1, 0x58, 0xb1, 0x4c, 0x73, 0x7b, - 0x16, 0x4f, 0xc3, 0xed, 0xa9, 0x47, 0x7e, 0xe7, 0xfb, 0x8d, 0xfc, 0x36, 0xd7, 0x76, 0xa1, 0xaf, - 0xb5, 0xfd, 0xcb, 0x03, 0x30, 0x7c, 0x9b, 0x04, 0x2c, 0x81, 0xe6, 0x35, 0x18, 0xde, 0xe3, 0x3f, - 0x93, 0xef, 0xac, 0x04, 0x06, 0x96, 0xe5, 0x74, 0xde, 0xb6, 0x3b, 0x6e, 0xb3, 0x36, 0x1f, 0xef, - 0x62, 0x35, 0x6f, 0x65, 0x59, 0x80, 0x63, 0x1c, 0x5a, 0xa1, 0x4e, 0x35, 0xfb, 0x56, 0xcb, 0x8d, - 0x92, 0x41, 0x40, 0x8b, 0xb2, 0x00, 0xc7, 0x38, 0xe8, 0x39, 0x18, 0xaa, 0xbb, 0xd1, 0x96, 0x53, - 0x4f, 0x7a, 0xf9, 0x16, 0x19, 0x14, 0x8b, 0x52, 0xe6, 0x26, 0x72, 0xa3, 0xad, 0x80, 0x30, 0xcb, - 0x6e, 0xd7, 0x83, 0xeb, 0x45, 0xad, 0x0c, 0x1b, 0x98, 0xac, 0x49, 0xbe, 0xe8, 0x99, 0x88, 0x80, - 0x8c, 0x9b, 0x24, 0x0b, 0x70, 0x8c, 0x43, 0xd7, 0x7f, 0xd5, 0x6f, 0xb5, 0xdd, 0xa6, 0x08, 0x8a, - 0xd5, 0xd6, 0xff, 0x9c, 0x80, 0x63, 0x85, 0x41, 0xb1, 0xa9, 0x08, 0xa3, 0xe2, 0x27, 0xf9, 0x19, - 0x87, 0x4d, 0x01, 0xc7, 0x0a, 0xc3, 0xbe, 0x0d, 0x63, 0x7c, 0x27, 0xcf, 0x35, 0x1d, 0xb7, 0xb5, - 0x38, 0x87, 0x6e, 0x76, 0x45, 0x7e, 0x5f, 0x4b, 0x89, 0xfc, 0xbe, 0x64, 0x54, 0xea, 0x8e, 0x00, - 0xb7, 0xbf, 0x97, 0x83, 0xc2, 0x19, 0x7e, 0x09, 0xe7, 0xcc, 0xbf, 0xb3, 0x86, 0xee, 0x26, 0xbe, - 0x82, 0xb3, 0x99, 0xe5, 0x43, 0x8e, 0x63, 0xbf, 0x80, 0xf3, 0x13, 0x0b, 0x2e, 0x4a, 0x54, 0x26, - 0xd4, 0xca, 0xae, 
0xc7, 0xe2, 0x03, 0x4e, 0x7f, 0x98, 0xdf, 0x33, 0x86, 0xf9, 0xf5, 0xec, 0xba, - 0xac, 0xf7, 0xa3, 0xe7, 0xa7, 0xed, 0x7e, 0x6c, 0x41, 0x29, 0xad, 0xc2, 0x19, 0x7c, 0x02, 0xe8, - 0x5d, 0xf3, 0x13, 0x40, 0xb7, 0x4f, 0xa7, 0xe7, 0x3d, 0x3e, 0x05, 0xf4, 0x93, 0x1e, 0xfd, 0x66, - 0xdf, 0xdd, 0x69, 0xca, 0xe3, 0xce, 0xca, 0xca, 0x75, 0xc6, 0x59, 0xa4, 0x9f, 0x9b, 0x4d, 0x18, - 0x0a, 0x99, 0x33, 0x5d, 0x2c, 0x81, 0xa5, 0x2c, 0x0e, 0x41, 0x4a, 0x4f, 0x98, 0x3e, 0xd9, 0x6f, - 0x2c, 0x78, 0xd8, 0xff, 0xc9, 0x82, 0xd1, 0x33, 0xfc, 0xce, 0x93, 0x6f, 0x4e, 0xf2, 0x2b, 0xd9, - 0x4d, 0x72, 0x8f, 0x89, 0x3d, 0xcc, 0x43, 0xd7, 0xa7, 0x6f, 0xd0, 0x17, 0x2c, 0xe5, 0x40, 0xe7, - 0x41, 0x46, 0x6f, 0x66, 0xd7, 0x8e, 0x93, 0xa4, 0x40, 0x42, 0x5f, 0x4f, 0xe4, 0x85, 0xca, 0x65, - 0x95, 0x6c, 0xa1, 0xab, 0x35, 0x0f, 0x90, 0x1f, 0xea, 0xab, 0x16, 0x00, 0x6f, 0xa7, 0xc8, 0xe7, - 0x48, 0xdb, 0xb6, 0x7d, 0x6a, 0x23, 0x45, 0x99, 0xf0, 0xa6, 0x29, 0x01, 0x19, 0x17, 0x60, 0xad, - 0x25, 0x0f, 0x91, 0xf8, 0xe9, 0xa1, 0x73, 0x4e, 0x7d, 0xc5, 0x82, 0xf1, 0x44, 0x73, 0x53, 0xea, - 0xef, 0x98, 0x9f, 0xc4, 0xc8, 0xe0, 0xdc, 0x32, 0xb3, 0xfc, 0xe9, 0xb7, 0xb4, 0x3f, 0xf9, 0x29, - 0x30, 0xbe, 0x19, 0x86, 0xde, 0x85, 0xa2, 0xbc, 0x62, 0xc9, 0xe5, 0x9d, 0xe5, 0xa7, 0x81, 0x94, - 0x1e, 0x25, 0x21, 0x21, 0x8e, 0xf9, 0x25, 0xe2, 0x73, 0x72, 0x7d, 0xc5, 0xe7, 0x3c, 0xda, 0x0f, - 0x0b, 0xa5, 0x1b, 0xc0, 0x06, 0x4f, 0xc5, 0x00, 0xf6, 0x64, 0xe6, 0x06, 0xb0, 0xa7, 0xce, 0xd8, - 0x00, 0xa6, 0x79, 0x23, 0xf2, 0x0f, 0xe1, 0x8d, 0x78, 0x17, 0x2e, 0xee, 0xc5, 0xda, 0xad, 0x5a, - 0x49, 0xe2, 0xfb, 0x48, 0xd7, 0x52, 0xcd, 0x5e, 0x54, 0x53, 0x0f, 0x23, 0xe2, 0x45, 0x9a, 0x5e, - 0x1c, 0x87, 0x06, 0xdd, 0x4e, 0x21, 0x87, 0x53, 0x99, 0x24, 0xcd, 0xca, 0xc3, 0x7d, 0x98, 0x95, - 0xbf, 0xd5, 0xf3, 0x43, 0xf2, 0x85, 0xd3, 0xfd, 0x90, 0xfc, 0xe3, 0x27, 0xfe, 0x88, 0xfc, 0xb3, - 0xb1, 0x8f, 0x8f, 0x07, 0x94, 0xa5, 0x3b, 0xe4, 0xbe, 0x9e, 0x0c, 0x1c, 0x00, 0x36, 0xf4, 0x9f, - 0xce, 0x56, 0xad, 0xcf, 0x20, 0x78, 0x60, 0xe4, 0x21, 
0x82, 0x07, 0x12, 0x36, 0xfe, 0xd1, 0x8c, - 0x6c, 0xfc, 0x1e, 0x4c, 0xb8, 0x2d, 0xa7, 0x4e, 0x36, 0x3b, 0xcd, 0x26, 0x0f, 0x1d, 0x97, 0x1f, - 0x6f, 0x4a, 0x35, 0x15, 0xac, 0xfa, 0x55, 0xa7, 0x99, 0xfc, 0x46, 0x9e, 0x0a, 0x91, 0x5f, 0x4e, - 0x50, 0xc2, 0x5d, 0xb4, 0xe9, 0x82, 0x65, 0x19, 0x6e, 0x48, 0x44, 0x47, 0x9b, 0x79, 0xa8, 0x0b, - 0x7c, 0xc1, 0x2e, 0xc5, 0x60, 0xac, 0xe3, 0xa0, 0x15, 0x28, 0xd6, 0xbc, 0x50, 0x3c, 0x3a, 0x1b, - 0x67, 0xc2, 0xec, 0x23, 0x54, 0x04, 0xce, 0xaf, 0x57, 0xd4, 0x73, 0xb3, 0x27, 0x53, 0x92, 0x27, - 0xa9, 0x72, 0x1c, 0xd7, 0x47, 0x6b, 0x8c, 0x98, 0xc8, 0xbf, 0xcf, 0x1d, 0xc7, 0x4f, 0xf7, 0xb0, - 0x4c, 0xcf, 0xaf, 0xcb, 0x2f, 0x08, 0x8c, 0x09, 0x76, 0x22, 0x91, 0x7e, 0x4c, 0x41, 0xfb, 0x88, - 0xd6, 0xf9, 0x63, 0x3f, 0xa2, 0xc5, 0xb2, 0xa6, 0x45, 0x4d, 0xe5, 0x87, 0xba, 0x9a, 0x59, 0xd6, - 0xb4, 0x38, 0x24, 0x4b, 0x64, 0x4d, 0x8b, 0x01, 0x58, 0x67, 0x89, 0x36, 0x7a, 0xf9, 0xe3, 0x2e, - 0x30, 0xa1, 0x71, 0x72, 0xef, 0x9a, 0xee, 0x98, 0xb9, 0x78, 0xac, 0x63, 0xa6, 0xcb, 0x91, 0x74, - 0xe9, 0x04, 0x8e, 0xa4, 0x06, 0xcb, 0x67, 0xb5, 0x38, 0x27, 0x7c, 0x77, 0x19, 0xdc, 0x58, 0xd8, - 0x33, 0x74, 0x1e, 0xe2, 0xc6, 0x7e, 0x62, 0xce, 0xa0, 0x67, 0xe4, 0xe6, 0x95, 0x07, 0x8e, 0xdc, - 0xa4, 0xe2, 0x39, 0x86, 0xb3, 0xc4, 0x68, 0x79, 0x21, 0x9e, 0x63, 0x30, 0xd6, 0x71, 0x92, 0x6e, - 0x99, 0xc7, 0x4f, 0xcd, 0x2d, 0x33, 0x79, 0x06, 0x6e, 0x99, 0x27, 0xfa, 0x76, 0xcb, 0xbc, 0x0f, - 0x17, 0xda, 0x7e, 0x6d, 0xde, 0x0d, 0x83, 0x0e, 0x7b, 0x4b, 0x53, 0xee, 0xd4, 0xea, 0x24, 0x62, - 0x7e, 0x9d, 0x91, 0xeb, 0xd7, 0xf5, 0x46, 0xb6, 0xd9, 0x46, 0x9e, 0xde, 0x7b, 0x71, 0x9b, 0x44, - 0x7c, 0x32, 0x93, 0xb5, 0x98, 0x45, 0x80, 0xc5, 0xf8, 0xa5, 0x14, 0xe2, 0x34, 0x3e, 0xba, 0x57, - 0xe8, 0xe9, 0xb3, 0xf1, 0x0a, 0x7d, 0x12, 0x0a, 0x61, 0xa3, 0x13, 0xd5, 0xfc, 0x7d, 0x8f, 0xb9, - 0xfe, 0x8a, 0xea, 0x33, 0xba, 0x85, 0x8a, 0x80, 0xdf, 0x3b, 0x9c, 0x9a, 0x90, 0xbf, 0x35, 0x9b, - 0x99, 0x80, 0xa0, 0xdf, 0xea, 0xf1, 0x5a, 0xc0, 0x3e, 0xcd, 0xd7, 0x02, 0x57, 0x4e, 0xf4, 
0x52, - 0x20, 0xcd, 0xf5, 0xf5, 0xcc, 0x87, 0xce, 0xf5, 0xf5, 0x9b, 0x16, 0x8c, 0xed, 0xe9, 0x06, 0x4a, - 0xe1, 0x9e, 0xcb, 0x20, 0x4c, 0xc0, 0xb0, 0x7b, 0x96, 0x6d, 0x2a, 0xec, 0x0c, 0xd0, 0xbd, 0x24, - 0x00, 0x9b, 0x2d, 0x49, 0x09, 0x61, 0x78, 0xf6, 0x51, 0x85, 0x30, 0xbc, 0xcf, 0x84, 0x99, 0xbc, - 0xe9, 0x32, 0x9f, 0x5d, 0xb6, 0x11, 0x8c, 0x52, 0x30, 0xaa, 0x00, 0x46, 0x9d, 0x1f, 0xfa, 0xb2, - 0x05, 0x13, 0xf2, 0x72, 0x26, 0x1c, 0x0c, 0xa1, 0x88, 0xc1, 0xca, 0xf2, 0x4e, 0xc8, 0x82, 0x78, - 0xb7, 0x12, 0x7c, 0x70, 0x17, 0x67, 0x2a, 0xda, 0x55, 0xc8, 0x4b, 0x3d, 0x64, 0xa1, 0x86, 0x42, - 0x91, 0x99, 0x8d, 0xc1, 0x58, 0xc7, 0x41, 0xdf, 0x50, 0x9f, 0xc7, 0xbc, 0xc6, 0xa4, 0xfa, 0x6b, - 0x19, 0x2b, 0xa8, 0x59, 0x7c, 0x23, 0x13, 0xfd, 0x9a, 0x05, 0x13, 0xfb, 0x09, 0xab, 0x86, 0x08, - 0x42, 0xc3, 0xd9, 0xdb, 0x4b, 0x7a, 0x7c, 0xcf, 0xb9, 0xab, 0x05, 0x0f, 0xed, 0x01, 0xfe, 0x50, - 0x7d, 0xfb, 0xf3, 0x8f, 0x11, 0x9c, 0x4b, 0x7c, 0x9c, 0xfa, 0xa3, 0x66, 0x2e, 0xe5, 0xab, 0xc9, - 0xb4, 0xb4, 0x63, 0x12, 0xdf, 0x48, 0x4d, 0x6b, 0xe4, 0x8e, 0xcd, 0x9d, 0x6a, 0xee, 0xd8, 0x81, - 0xb3, 0xc9, 0x1d, 0x3b, 0x71, 0x1a, 0xb9, 0x63, 0xcf, 0x9f, 0x28, 0x77, 0xac, 0x96, 0xbb, 0x77, - 0xf0, 0x3e, 0xb9, 0x7b, 0x67, 0x61, 0x5c, 0x46, 0xf7, 0x13, 0x91, 0x14, 0x94, 0x3b, 0xf6, 0xae, - 0x88, 0x2a, 0xe3, 0x73, 0x66, 0x31, 0x4e, 0xe2, 0xa3, 0x0f, 0x2c, 0xc8, 0x7b, 0xac, 0xe6, 0x50, - 0x56, 0x89, 0xf4, 0xcd, 0xa5, 0xc5, 0xee, 0xad, 0x42, 0x2c, 0xc8, 0x78, 0xc6, 0x3c, 0x83, 0xdd, - 0x93, 0x3f, 0x30, 0x6f, 0x01, 0x7a, 0x03, 0x4a, 0xfe, 0xce, 0x4e, 0xd3, 0x77, 0x6a, 0x71, 0x82, - 0x5b, 0xe9, 0x79, 0xe4, 0xaf, 0xb3, 0x54, 0x82, 0xbf, 0x8d, 0x1e, 0x78, 0xb8, 0x27, 0x05, 0xf4, - 0x2d, 0xaa, 0x0c, 0x44, 0x7e, 0x40, 0x6a, 0xb1, 0x91, 0xa4, 0xc8, 0xfa, 0x4c, 0x32, 0xef, 0x73, - 0xc5, 0xe4, 0xc3, 0x7b, 0xaf, 0x26, 0x25, 0x51, 0x8a, 0x93, 0xcd, 0x42, 0x01, 0x5c, 0x6e, 0xa7, - 0xd9, 0x68, 0x42, 0xf1, 0x26, 0xe1, 0x38, 0x4b, 0x91, 0xdc, 0xba, 0x97, 0x53, 0xad, 0x3c, 0x21, - 0xee, 0x41, 0x59, 0x4f, 0x7d, 
0x5b, 0x38, 0x9b, 0xd4, 0xb7, 0xe6, 0x27, 0xe5, 0xc7, 0xce, 0xfc, - 0x93, 0xf2, 0xe8, 0xff, 0xa6, 0x66, 0x69, 0xe6, 0xa6, 0x8d, 0x7a, 0xe6, 0x6b, 0xe2, 0x43, 0x97, - 0xa9, 0xf9, 0x1f, 0x59, 0x30, 0xc9, 0x57, 0x5e, 0x52, 0xa1, 0xa6, 0xc7, 0xb9, 0x88, 0xde, 0xcf, - 0xda, 0x39, 0xcd, 0xe2, 0x74, 0x2a, 0x06, 0x57, 0xe6, 0x33, 0x3d, 0xa6, 0x25, 0xe8, 0xab, 0x29, - 0x6a, 0xfc, 0x78, 0x56, 0xc6, 0xc2, 0xf4, 0x0c, 0xbf, 0x17, 0x8e, 0xfa, 0xd1, 0xdc, 0xff, 0x49, - 0x4f, 0x5b, 0x26, 0x62, 0xcd, 0xfb, 0x6b, 0xa7, 0x64, 0xcb, 0xd4, 0xd3, 0x10, 0x9f, 0xc4, 0xa2, - 0x39, 0xf9, 0x05, 0x8b, 0x7f, 0x29, 0xa0, 0xa7, 0x16, 0xb2, 0x6d, 0x6a, 0x21, 0xab, 0x59, 0xe6, - 0x2a, 0xd7, 0xd5, 0xa1, 0x5f, 0xb5, 0xe0, 0x62, 0x9a, 0x90, 0x4c, 0x69, 0xd2, 0xa7, 0xcd, 0x26, - 0x65, 0xa8, 0x6c, 0xeb, 0x0d, 0xca, 0x26, 0x41, 0xf3, 0x8f, 0x8b, 0x9a, 0x07, 0x29, 0x22, 0xed, - 0xcc, 0x03, 0xfd, 0x3c, 0x18, 0x72, 0xbd, 0xa6, 0xeb, 0x11, 0xf1, 0xa8, 0x27, 0xcb, 0xab, 0x87, - 0x48, 0x88, 0x4e, 0xa9, 0x63, 0xc1, 0xe5, 0x11, 0x3b, 0x94, 0x92, 0x1f, 0x7b, 0x18, 0x3c, 0xfb, - 0x8f, 0x3d, 0xec, 0x43, 0x71, 0xdf, 0x8d, 0x1a, 0xcc, 0x11, 0x2e, 0xfc, 0x34, 0x19, 0x3c, 0x86, - 0xa1, 0xe4, 0xe2, 0xbe, 0xdf, 0x91, 0x0c, 0x70, 0xcc, 0x0b, 0xcd, 0x70, 0xc6, 0x2c, 0xbc, 0x2f, - 0x19, 0x77, 0x75, 0x47, 0x16, 0xe0, 0x18, 0x87, 0x0e, 0xd6, 0x28, 0xfd, 0x27, 0x93, 0x5e, 0x88, - 0x8c, 0x84, 0x59, 0x24, 0xbc, 0x12, 0x14, 0xf9, 0x93, 0xb3, 0x3b, 0x1a, 0x0f, 0x6c, 0x70, 0x54, - 0x49, 0x21, 0x0b, 0x3d, 0x93, 0x42, 0xbe, 0xc7, 0xce, 0xfc, 0xc8, 0xf5, 0x3a, 0x64, 0xc3, 0x13, - 0x41, 0x81, 0xab, 0xd9, 0x3c, 0x90, 0xe3, 0x34, 0x79, 0x3a, 0x82, 0xf8, 0x3f, 0xd6, 0xf8, 0x69, - 0xe6, 0xf2, 0x91, 0x63, 0xcd, 0xe5, 0xf1, 0x4d, 0x79, 0x34, 0xf3, 0x9b, 0x72, 0x44, 0xda, 0x99, - 0xdc, 0x94, 0x3f, 0x54, 0x37, 0xca, 0xff, 0x6d, 0x01, 0x52, 0x47, 0xb7, 0x13, 0xee, 0x8a, 0x2f, - 0xf4, 0x9c, 0x7e, 0x88, 0xd7, 0xe7, 0x2c, 0x00, 0x4f, 0x7d, 0x12, 0x28, 0xdb, 0x53, 0x8b, 0xd3, - 0x8c, 0x1b, 0x10, 0xc3, 0xb0, 0xc6, 0xd3, 0xfe, 0x1f, 0x16, 0x5c, 
0xee, 0xee, 0xfb, 0x19, 0x04, - 0x00, 0x1d, 0x98, 0x01, 0x40, 0x5b, 0x19, 0x5a, 0x5c, 0x55, 0x37, 0x7a, 0x84, 0x02, 0xfd, 0x28, - 0x07, 0xe3, 0x3a, 0x72, 0x85, 0x9c, 0xc5, 0x64, 0xef, 0x1b, 0xf1, 0x7c, 0xb7, 0xb2, 0xed, 0x6f, - 0x45, 0x18, 0xee, 0xd3, 0xa2, 0x27, 0x3f, 0x9b, 0x88, 0x9e, 0xbc, 0x93, 0x3d, 0xeb, 0xe3, 0x83, - 0x28, 0xff, 0x9b, 0x05, 0x17, 0x12, 0x35, 0xce, 0x60, 0x81, 0xed, 0x99, 0x0b, 0xec, 0xd5, 0xcc, - 0x7b, 0xdd, 0x63, 0x75, 0xfd, 0x76, 0xae, 0xab, 0xb7, 0xec, 0x1e, 0xf0, 0xcb, 0x16, 0xe4, 0x23, - 0x27, 0xdc, 0x95, 0xb1, 0x38, 0x9f, 0x3e, 0x95, 0x15, 0x30, 0x4d, 0x7f, 0x0b, 0xe9, 0xac, 0xda, - 0xc7, 0x60, 0x98, 0x73, 0x9f, 0xfc, 0x25, 0x0b, 0x20, 0x46, 0x7a, 0x54, 0x2a, 0xab, 0xfd, 0xbb, - 0x39, 0xb8, 0x94, 0xba, 0x8c, 0xd0, 0x17, 0x95, 0x51, 0xc7, 0xca, 0x3a, 0xd2, 0xcc, 0x60, 0xa4, - 0xdb, 0x76, 0xc6, 0x0c, 0xdb, 0x8e, 0x30, 0xe9, 0x3c, 0xaa, 0x0b, 0x87, 0x10, 0xd3, 0xda, 0x60, - 0xfd, 0xd0, 0x8a, 0x83, 0x17, 0x55, 0xf2, 0x8b, 0xff, 0x0f, 0x23, 0xbd, 0xed, 0x1f, 0x69, 0xf1, - 0xd6, 0xb2, 0xa3, 0x67, 0x20, 0x2b, 0xf6, 0x4d, 0x59, 0x81, 0xb3, 0x77, 0xff, 0xf5, 0x10, 0x16, - 0x6f, 0x43, 0x9a, 0x3f, 0xb0, 0xbf, 0xcc, 0x59, 0xc6, 0x9b, 0xa9, 0x5c, 0xdf, 0x6f, 0xa6, 0xc6, - 0x60, 0xe4, 0x75, 0xb7, 0xad, 0x5c, 0x57, 0xd3, 0xdf, 0xf9, 0xc1, 0xd5, 0xc7, 0xfe, 0xf0, 0x07, - 0x57, 0x1f, 0xfb, 0xde, 0x0f, 0xae, 0x3e, 0xf6, 0xb9, 0xa3, 0xab, 0xd6, 0x77, 0x8e, 0xae, 0x5a, - 0x7f, 0x78, 0x74, 0xd5, 0xfa, 0xde, 0xd1, 0x55, 0xeb, 0x3f, 0x1f, 0x5d, 0xb5, 0xfe, 0xd6, 0x7f, - 0xb9, 0xfa, 0xd8, 0xeb, 0x05, 0xd9, 0xb1, 0xff, 0x17, 0x00, 0x00, 0xff, 0xff, 0x29, 0x77, 0x02, - 0xd6, 0x0f, 0xb2, 0x00, 0x00, + // 10618 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0xbd, 0x7b, 0x70, 0x24, 0xc7, + 0x79, 0x18, 0xce, 0x59, 0x60, 0xf1, 0xf8, 0xf0, 0x38, 0x5c, 0xdf, 0x6b, 0x09, 0x92, 0x07, 0x7a, + 0x28, 0xf2, 0xc7, 0xb3, 0x29, 0xc0, 0x3c, 0x4a, 0xbf, 0x30, 0x52, 0x22, 0x09, 0x8f, 0x03, 0x0e, + 0x04, 0x70, 0x00, 0x7b, 
0x71, 0x77, 0x26, 0xc5, 0x48, 0x1a, 0xec, 0x36, 0x76, 0x87, 0xd8, 0x9d, + 0x59, 0xcd, 0xcc, 0x02, 0x07, 0xf2, 0x28, 0x29, 0xb2, 0x9e, 0xb1, 0x62, 0x25, 0xb6, 0x24, 0x4b, + 0x4a, 0x52, 0xa5, 0x28, 0x52, 0xa2, 0x52, 0x5c, 0x49, 0xc9, 0x95, 0x3f, 0x52, 0xf6, 0x7f, 0xa9, + 0x94, 0x4b, 0x29, 0xa7, 0x2a, 0x72, 0x99, 0x89, 0xf4, 0x47, 0x0c, 0x46, 0x70, 0xa2, 0xaa, 0x24, + 0xa5, 0xaa, 0x44, 0x15, 0x3b, 0xf6, 0xe5, 0x51, 0xa9, 0x7e, 0x4e, 0xf7, 0xec, 0x2c, 0x0e, 0xb8, + 0x6b, 0xe0, 0x58, 0xf6, 0x5f, 0xc0, 0x7e, 0xfd, 0xf5, 0xf7, 0x75, 0xf7, 0xf4, 0x7c, 0xfd, 0xbd, + 0xfa, 0x1b, 0x58, 0xab, 0xf9, 0x49, 0xbd, 0xbd, 0x31, 0x59, 0x09, 0x9b, 0x53, 0x5e, 0x54, 0x0b, + 0x5b, 0x51, 0xf8, 0x2a, 0xfb, 0xe7, 0x9d, 0x3b, 0x61, 0xb4, 0xb5, 0xd9, 0x08, 0x77, 0xe2, 0xa9, + 0xed, 0xe7, 0xa6, 0x5a, 0x5b, 0xb5, 0x29, 0xaf, 0xe5, 0xc7, 0x53, 0x12, 0x3a, 0xb5, 0xfd, 0xac, + 0xd7, 0x68, 0xd5, 0xbd, 0x67, 0xa7, 0x6a, 0x24, 0x20, 0x91, 0x97, 0x90, 0xea, 0x64, 0x2b, 0x0a, + 0x93, 0x10, 0x7d, 0x20, 0xa5, 0x38, 0x29, 0x29, 0xb2, 0x7f, 0x3e, 0xac, 0x28, 0x4e, 0x6e, 0x3f, + 0x37, 0xd9, 0xda, 0xaa, 0x4d, 0x52, 0x8a, 0x93, 0x12, 0x3a, 0x29, 0x29, 0x8e, 0xbf, 0x53, 0x1b, + 0x53, 0x2d, 0xac, 0x85, 0x53, 0x8c, 0xf0, 0x46, 0x7b, 0x93, 0xfd, 0x62, 0x3f, 0xd8, 0x7f, 0x9c, + 0xe1, 0xb8, 0xbb, 0xf5, 0x7c, 0x3c, 0xe9, 0x87, 0x74, 0x7c, 0x53, 0x95, 0x30, 0x22, 0x53, 0xdb, + 0x1d, 0x83, 0x1a, 0xbf, 0xa4, 0xe1, 0xb4, 0xc2, 0x86, 0x5f, 0xd9, 0x9d, 0xda, 0x7e, 0x76, 0x83, + 0x24, 0x9d, 0xe3, 0x1f, 0x7f, 0x57, 0x8a, 0xda, 0xf4, 0x2a, 0x75, 0x3f, 0x20, 0xd1, 0x6e, 0x3a, + 0xff, 0x26, 0x49, 0xbc, 0x3c, 0x06, 0x53, 0xdd, 0x7a, 0x45, 0xed, 0x20, 0xf1, 0x9b, 0xa4, 0xa3, + 0xc3, 0xff, 0x7f, 0xb7, 0x0e, 0x71, 0xa5, 0x4e, 0x9a, 0x5e, 0x47, 0xbf, 0xe7, 0xba, 0xf5, 0x6b, + 0x27, 0x7e, 0x63, 0xca, 0x0f, 0x92, 0x38, 0x89, 0xb2, 0x9d, 0xdc, 0x2b, 0xd0, 0x37, 0xdd, 0x0c, + 0xdb, 0x41, 0x82, 0xde, 0x0b, 0xc5, 0x6d, 0xaf, 0xd1, 0x26, 0x25, 0xe7, 0x71, 0xe7, 0xe9, 0xc1, + 0x99, 0x27, 0xbf, 0xbf, 0x37, 0xf1, 0xd0, 0xfe, 0xde, 0x44, 
0xf1, 0x06, 0x05, 0xde, 0xd9, 0x9b, + 0x38, 0x4b, 0x82, 0x4a, 0x58, 0xf5, 0x83, 0xda, 0xd4, 0xab, 0x71, 0x18, 0x4c, 0x5e, 0x6b, 0x37, + 0x37, 0x48, 0x84, 0x79, 0x1f, 0xf7, 0x0f, 0x0a, 0x70, 0x6a, 0x3a, 0xaa, 0xd4, 0xfd, 0x6d, 0x52, + 0x4e, 0x28, 0xfd, 0xda, 0x2e, 0xaa, 0x43, 0x4f, 0xe2, 0x45, 0x8c, 0xdc, 0xd0, 0xe5, 0x95, 0xc9, + 0xfb, 0x7d, 0xf8, 0x93, 0xeb, 0x5e, 0x24, 0x69, 0xcf, 0xf4, 0xef, 0xef, 0x4d, 0xf4, 0xac, 0x7b, + 0x11, 0xa6, 0x2c, 0x50, 0x03, 0x7a, 0x83, 0x30, 0x20, 0xa5, 0x02, 0x63, 0x75, 0xed, 0xfe, 0x59, + 0x5d, 0x0b, 0x03, 0x35, 0x8f, 0x99, 0x81, 0xfd, 0xbd, 0x89, 0x5e, 0x0a, 0xc1, 0x8c, 0x0b, 0x9d, + 0xd7, 0x6b, 0x7e, 0xab, 0xd4, 0x63, 0x6b, 0x5e, 0x2f, 0xfb, 0x2d, 0x73, 0x5e, 0x2f, 0xfb, 0x2d, + 0x4c, 0x59, 0xb8, 0x9f, 0x2f, 0xc0, 0xe0, 0x74, 0x54, 0x6b, 0x37, 0x49, 0x90, 0xc4, 0xe8, 0xe3, + 0x00, 0x2d, 0x2f, 0xf2, 0x9a, 0x24, 0x21, 0x51, 0x5c, 0x72, 0x1e, 0xef, 0x79, 0x7a, 0xe8, 0xf2, + 0xd2, 0xfd, 0xb3, 0x5f, 0x93, 0x34, 0x67, 0x90, 0x78, 0xe4, 0xa0, 0x40, 0x31, 0xd6, 0x58, 0xa2, + 0xd7, 0x61, 0xd0, 0x8b, 0x12, 0x7f, 0xd3, 0xab, 0x24, 0x71, 0xa9, 0xc0, 0xf8, 0xbf, 0x70, 0xff, + 0xfc, 0xa7, 0x05, 0xc9, 0x99, 0xd3, 0x82, 0xfd, 0xa0, 0x84, 0xc4, 0x38, 0xe5, 0xe7, 0xfe, 0x76, + 0x2f, 0x0c, 0x4d, 0x47, 0xc9, 0xc2, 0x6c, 0x39, 0xf1, 0x92, 0x76, 0x8c, 0x7e, 0xcf, 0x81, 0x33, + 0x31, 0x5f, 0x36, 0x9f, 0xc4, 0x6b, 0x51, 0x58, 0x21, 0x71, 0x4c, 0xaa, 0x62, 0x5d, 0x36, 0xad, + 0x8c, 0x4b, 0x32, 0x9b, 0x2c, 0x77, 0x32, 0xba, 0x12, 0x24, 0xd1, 0xee, 0xcc, 0xb3, 0x62, 0xcc, + 0x67, 0x72, 0x30, 0x3e, 0xf9, 0xd6, 0x04, 0x92, 0x53, 0xa1, 0x94, 0xf8, 0x23, 0xc6, 0x79, 0xa3, + 0x46, 0x5f, 0x73, 0x60, 0xb8, 0x15, 0x56, 0x63, 0x4c, 0x2a, 0x61, 0xbb, 0x45, 0xaa, 0x62, 0x79, + 0x3f, 0x6c, 0x77, 0x1a, 0x6b, 0x1a, 0x07, 0x3e, 0xfe, 0xb3, 0x62, 0xfc, 0xc3, 0x7a, 0x13, 0x36, + 0x86, 0x82, 0x9e, 0x87, 0xe1, 0x20, 0x4c, 0xca, 0x2d, 0x52, 0xf1, 0x37, 0x7d, 0x52, 0x65, 0x1b, + 0x7f, 0x20, 0xed, 0x79, 0x4d, 0x6b, 0xc3, 0x06, 0xe6, 0xf8, 0x3c, 0x94, 0xba, 0xad, 0x1c, 0x1a, + 
0x83, 0x9e, 0x2d, 0xb2, 0xcb, 0x85, 0x0d, 0xa6, 0xff, 0xa2, 0xb3, 0x52, 0x00, 0xd1, 0xd7, 0x78, + 0x40, 0x48, 0x96, 0xf7, 0x14, 0x9e, 0x77, 0xc6, 0xdf, 0x0f, 0xa7, 0x3b, 0x86, 0x7e, 0x14, 0x02, + 0xee, 0x0f, 0xfa, 0x60, 0x40, 0x3e, 0x0a, 0xf4, 0x38, 0xf4, 0x06, 0x5e, 0x53, 0xca, 0xb9, 0x61, + 0x31, 0x8f, 0xde, 0x6b, 0x5e, 0x93, 0xbe, 0xe1, 0x5e, 0x93, 0x50, 0x8c, 0x96, 0x97, 0xd4, 0x19, + 0x1d, 0x0d, 0x63, 0xcd, 0x4b, 0xea, 0x98, 0xb5, 0xa0, 0x47, 0xa1, 0xb7, 0x19, 0x56, 0x09, 0x5b, + 0x8b, 0x22, 0x97, 0x10, 0x2b, 0x61, 0x95, 0x60, 0x06, 0xa5, 0xfd, 0x37, 0xa3, 0xb0, 0x59, 0xea, + 0x35, 0xfb, 0xcf, 0x47, 0x61, 0x13, 0xb3, 0x16, 0xf4, 0x55, 0x07, 0xc6, 0xe4, 0xde, 0x5e, 0x0e, + 0x2b, 0x5e, 0xe2, 0x87, 0x41, 0xa9, 0xc8, 0x24, 0x0a, 0xb6, 0xf7, 0x4a, 0x49, 0xca, 0x33, 0x25, + 0x31, 0x84, 0xb1, 0x6c, 0x0b, 0xee, 0x18, 0x05, 0xba, 0x0c, 0x50, 0x6b, 0x84, 0x1b, 0x5e, 0x83, + 0x2e, 0x48, 0xa9, 0x8f, 0x4d, 0x41, 0x49, 0x86, 0x05, 0xd5, 0x82, 0x35, 0x2c, 0x74, 0x0b, 0xfa, + 0x3d, 0x2e, 0xfd, 0x4b, 0xfd, 0x6c, 0x12, 0x2f, 0xda, 0x98, 0x84, 0x71, 0x9c, 0xcc, 0x0c, 0xed, + 0xef, 0x4d, 0xf4, 0x0b, 0x20, 0x96, 0xec, 0xd0, 0x33, 0x30, 0x10, 0xb6, 0xe8, 0xb8, 0xbd, 0x46, + 0x69, 0x80, 0x6d, 0xcc, 0x31, 0x31, 0xd6, 0x81, 0x55, 0x01, 0xc7, 0x0a, 0x03, 0x5d, 0x82, 0xfe, + 0xb8, 0xbd, 0x41, 0x9f, 0x63, 0x69, 0x90, 0x4d, 0xec, 0x94, 0x40, 0xee, 0x2f, 0x73, 0x30, 0x96, + 0xed, 0xe8, 0xdd, 0x30, 0x14, 0x91, 0x4a, 0x3b, 0x8a, 0x09, 0x7d, 0xb0, 0x25, 0x60, 0xb4, 0xcf, + 0x08, 0xf4, 0x21, 0x9c, 0x36, 0x61, 0x1d, 0x0f, 0xbd, 0x0f, 0x46, 0xe9, 0x03, 0xbe, 0x72, 0xab, + 0x15, 0x91, 0x38, 0xa6, 0x4f, 0x75, 0x88, 0x31, 0x3a, 0x2f, 0x7a, 0x8e, 0xce, 0x1b, 0xad, 0x38, + 0x83, 0x8d, 0x6e, 0x03, 0x78, 0x4a, 0x66, 0x94, 0x86, 0xd9, 0x62, 0x2e, 0xdb, 0xdb, 0x11, 0x0b, + 0xb3, 0x33, 0xa3, 0xf4, 0x39, 0xa6, 0xbf, 0xb1, 0xc6, 0x8f, 0xae, 0x4f, 0x95, 0x34, 0x48, 0x42, + 0xaa, 0xa5, 0x11, 0x36, 0x61, 0xb5, 0x3e, 0x73, 0x1c, 0x8c, 0x65, 0xbb, 0xfb, 0x77, 0x0a, 0xa0, + 0x51, 0x41, 0x33, 0x30, 0x20, 0xe4, 
0x9a, 0x78, 0x25, 0x67, 0x9e, 0x92, 0xcf, 0x41, 0x3e, 0xc1, + 0x3b, 0x7b, 0xb9, 0xf2, 0x50, 0xf5, 0x43, 0x6f, 0xc0, 0x50, 0x2b, 0xac, 0xae, 0x90, 0xc4, 0xab, + 0x7a, 0x89, 0x27, 0x4e, 0x73, 0x0b, 0x27, 0x8c, 0xa4, 0x38, 0x73, 0x8a, 0x3e, 0xba, 0xb5, 0x94, + 0x05, 0xd6, 0xf9, 0xa1, 0x17, 0x00, 0xc5, 0x24, 0xda, 0xf6, 0x2b, 0x64, 0xba, 0x52, 0xa1, 0x2a, + 0x11, 0x7b, 0x01, 0x7a, 0xd8, 0x64, 0xc6, 0xc5, 0x64, 0x50, 0xb9, 0x03, 0x03, 0xe7, 0xf4, 0x72, + 0xdf, 0x2c, 0xc0, 0xa8, 0x36, 0xd7, 0x16, 0xa9, 0xa0, 0xef, 0x38, 0x70, 0x4a, 0x1d, 0x67, 0x33, + 0xbb, 0xd7, 0xe8, 0xae, 0xe2, 0x87, 0x15, 0xb1, 0xf9, 0x7c, 0x29, 0x2f, 0xf5, 0x53, 0xf0, 0xe1, + 0xb2, 0xfe, 0x82, 0x98, 0xc3, 0xa9, 0x4c, 0x2b, 0xce, 0x0e, 0x6b, 0xfc, 0x2b, 0x0e, 0x9c, 0xcd, + 0x23, 0x91, 0x23, 0x73, 0xeb, 0xba, 0xcc, 0xb5, 0x2a, 0xbc, 0x28, 0x57, 0x3a, 0x19, 0x5d, 0x8e, + 0xff, 0xdf, 0x02, 0x8c, 0xe9, 0x5b, 0x88, 0x69, 0x02, 0xff, 0xc2, 0x81, 0x73, 0x72, 0x06, 0x98, + 0xc4, 0xed, 0x46, 0x66, 0x79, 0x9b, 0x56, 0x97, 0x97, 0x9f, 0xa4, 0xd3, 0x79, 0xfc, 0xf8, 0x32, + 0x3f, 0x26, 0x96, 0xf9, 0x5c, 0x2e, 0x0e, 0xce, 0x1f, 0xea, 0xf8, 0xb7, 0x1c, 0x18, 0xef, 0x4e, + 0x34, 0x67, 0xe1, 0x5b, 0xe6, 0xc2, 0xbf, 0x6c, 0x6f, 0x92, 0x9c, 0x3d, 0x5b, 0x7e, 0x36, 0x59, + 0xfd, 0x01, 0xfc, 0xe6, 0x00, 0x74, 0x9c, 0x21, 0xe8, 0x59, 0x18, 0x12, 0xe2, 0x78, 0x39, 0xac, + 0xc5, 0x6c, 0x90, 0x03, 0xfc, 0x5d, 0x9b, 0x4e, 0xc1, 0x58, 0xc7, 0x41, 0x55, 0x28, 0xc4, 0xcf, + 0x89, 0xa1, 0x5b, 0x10, 0x6f, 0xe5, 0xe7, 0x94, 0x16, 0xd9, 0xb7, 0xbf, 0x37, 0x51, 0x28, 0x3f, + 0x87, 0x0b, 0xf1, 0x73, 0x54, 0x53, 0xaf, 0xf9, 0x89, 0x3d, 0x4d, 0x7d, 0xc1, 0x4f, 0x14, 0x1f, + 0xa6, 0xa9, 0x2f, 0xf8, 0x09, 0xa6, 0x2c, 0xa8, 0x05, 0x52, 0x4f, 0x92, 0x16, 0x3b, 0xf1, 0xad, + 0x58, 0x20, 0x57, 0xd7, 0xd7, 0xd7, 0x14, 0x2f, 0xa6, 0x5f, 0x50, 0x08, 0x66, 0x5c, 0xd0, 0xe7, + 0x1c, 0xba, 0xe2, 0xbc, 0x31, 0x8c, 0x76, 0x85, 0xe2, 0x70, 0xdd, 0xde, 0x16, 0x08, 0xa3, 0x5d, + 0xc5, 0x5c, 0x3c, 0x48, 0xd5, 0x80, 0x75, 0xd6, 0x6c, 0xe2, 0xd5, 0xcd, 
0x98, 0xe9, 0x09, 0x76, + 0x26, 0x3e, 0x37, 0x5f, 0xce, 0x4c, 0x7c, 0x6e, 0xbe, 0x8c, 0x19, 0x17, 0xfa, 0x40, 0x23, 0x6f, + 0x47, 0xe8, 0x18, 0x16, 0x1e, 0x28, 0xf6, 0x76, 0xcc, 0x07, 0x8a, 0xbd, 0x1d, 0x4c, 0x59, 0x50, + 0x4e, 0x61, 0x1c, 0x33, 0x95, 0xc2, 0x0a, 0xa7, 0xd5, 0x72, 0xd9, 0xe4, 0xb4, 0x5a, 0x2e, 0x63, + 0xca, 0x82, 0x6d, 0xd2, 0x4a, 0xcc, 0xf4, 0x11, 0x3b, 0x9b, 0x74, 0x36, 0xc3, 0x69, 0x61, 0xb6, + 0x8c, 0x29, 0x0b, 0x2a, 0x32, 0xbc, 0xd7, 0xda, 0x11, 0x57, 0x66, 0x86, 0x2e, 0xaf, 0x5a, 0xd8, + 0x2f, 0x94, 0x9c, 0xe2, 0x36, 0xb8, 0xbf, 0x37, 0x51, 0x64, 0x20, 0xcc, 0x19, 0xb9, 0xbf, 0xdb, + 0x93, 0x8a, 0x0b, 0x29, 0xcf, 0xd1, 0xdf, 0x66, 0x07, 0xa1, 0x90, 0x05, 0x42, 0xf5, 0x75, 0x8e, + 0x4d, 0xf5, 0x3d, 0xc3, 0x4f, 0x3c, 0x83, 0x1d, 0xce, 0xf2, 0x47, 0xbf, 0xe6, 0x74, 0xda, 0xb6, + 0x9e, 0xfd, 0xb3, 0x2c, 0x3d, 0x98, 0xf9, 0x59, 0x71, 0xa0, 0xc9, 0x3b, 0xfe, 0x39, 0x27, 0x55, + 0x22, 0xe2, 0x6e, 0xe7, 0xc0, 0x47, 0xcc, 0x73, 0xc0, 0xa2, 0x41, 0xae, 0xcb, 0xfd, 0xcf, 0x3b, + 0x30, 0x22, 0xe1, 0x54, 0x3d, 0x8e, 0xd1, 0x2d, 0x18, 0x90, 0x23, 0x15, 0x4f, 0xcf, 0xa6, 0x2f, + 0x40, 0x29, 0xf1, 0x6a, 0x30, 0x8a, 0x9b, 0xfb, 0x9d, 0x3e, 0x40, 0xe9, 0x59, 0xd5, 0x0a, 0x63, + 0x9f, 0x49, 0xa2, 0x7b, 0x38, 0x85, 0x02, 0xed, 0x14, 0xba, 0x61, 0xf3, 0x14, 0x4a, 0x87, 0x65, + 0x9c, 0x47, 0xbf, 0x96, 0x91, 0xdb, 0xfc, 0x60, 0xfa, 0xf0, 0xb1, 0xc8, 0x6d, 0x6d, 0x08, 0x07, + 0x4b, 0xf0, 0x6d, 0x21, 0xc1, 0xf9, 0xd1, 0xf5, 0x4b, 0x76, 0x25, 0xb8, 0x36, 0x8a, 0xac, 0x2c, + 0x8f, 0xb8, 0x84, 0xe5, 0x67, 0xd7, 0x4d, 0xab, 0x12, 0x56, 0xe3, 0x6a, 0xca, 0xda, 0x88, 0xcb, + 0xda, 0x3e, 0x5b, 0x3c, 0x35, 0x59, 0x9b, 0xe5, 0xa9, 0xa4, 0xee, 0x6b, 0x52, 0xea, 0xf2, 0x53, + 0xeb, 0x25, 0xcb, 0x52, 0x57, 0xe3, 0xdb, 0x29, 0x7f, 0x3f, 0x0a, 0xe7, 0x3a, 0xf1, 0x30, 0xd9, + 0x44, 0x53, 0x30, 0x58, 0x09, 0x83, 0x4d, 0xbf, 0xb6, 0xe2, 0xb5, 0x84, 0xbd, 0xa6, 0x64, 0xd1, + 0xac, 0x6c, 0xc0, 0x29, 0x0e, 0x7a, 0x8c, 0x0b, 0x1e, 0xee, 0x11, 0x19, 0x12, 0xa8, 0x3d, 0x4b, + 0x64, 0x97, 
0x49, 0xa1, 0xf7, 0x0c, 0x7c, 0xf5, 0x1b, 0x13, 0x0f, 0x7d, 0xe2, 0xdf, 0x3f, 0xfe, + 0x90, 0xfb, 0xfb, 0x3d, 0xf0, 0x48, 0x2e, 0x4f, 0xa1, 0xad, 0xff, 0xa6, 0xa1, 0xad, 0x6b, 0xed, + 0x42, 0x8a, 0xdc, 0xb4, 0xa9, 0xc8, 0x6a, 0xe4, 0xf3, 0xf4, 0x72, 0xad, 0x19, 0xe7, 0x0f, 0x8a, + 0x2e, 0x54, 0xe0, 0x35, 0x49, 0xdc, 0xf2, 0x2a, 0x44, 0xcc, 0x5e, 0x2d, 0xd4, 0x35, 0xd9, 0x80, + 0x53, 0x1c, 0x6e, 0x42, 0x6f, 0x7a, 0xed, 0x46, 0x22, 0x1c, 0x65, 0x9a, 0x09, 0xcd, 0xc0, 0x58, + 0xb6, 0xa3, 0xbf, 0xeb, 0x00, 0xea, 0xe4, 0x2a, 0x5e, 0xc4, 0xf5, 0xe3, 0x58, 0x87, 0x99, 0xf3, + 0xfb, 0x9a, 0x11, 0xae, 0xcd, 0x34, 0x67, 0x1c, 0xda, 0x33, 0xfd, 0x58, 0x7a, 0x0e, 0x71, 0xe3, + 0xe0, 0x10, 0x3e, 0x34, 0xe6, 0x6a, 0xa9, 0x54, 0x48, 0x1c, 0x73, 0x77, 0x9c, 0xee, 0x6a, 0x61, + 0x60, 0x2c, 0xdb, 0xd1, 0x04, 0x14, 0x49, 0x14, 0x85, 0x91, 0xb0, 0xb5, 0xd9, 0x36, 0xbe, 0x42, + 0x01, 0x98, 0xc3, 0xdd, 0x9f, 0x14, 0xa0, 0xd4, 0xcd, 0x3a, 0x41, 0xbf, 0xa5, 0xd9, 0xd5, 0xc2, + 0x72, 0x12, 0x86, 0x5f, 0x78, 0x7c, 0x36, 0x51, 0xd6, 0x00, 0xec, 0x62, 0x61, 0x8b, 0x56, 0x9c, + 0x1d, 0xe0, 0xf8, 0x97, 0x34, 0x0b, 0x5b, 0x27, 0x91, 0x73, 0xc0, 0x6f, 0x9a, 0x07, 0xfc, 0x9a, + 0xed, 0x49, 0xe9, 0xc7, 0xfc, 0x1f, 0x16, 0xe1, 0x8c, 0x6c, 0x2d, 0x13, 0x7a, 0x54, 0xbe, 0xd8, + 0x26, 0xd1, 0x2e, 0xfa, 0xa1, 0x03, 0x67, 0xbd, 0xac, 0xeb, 0xc6, 0x27, 0xc7, 0xb0, 0xd0, 0x1a, + 0xd7, 0xc9, 0xe9, 0x1c, 0x8e, 0x7c, 0xa1, 0x2f, 0x8b, 0x85, 0x3e, 0x9b, 0x87, 0xd2, 0xc5, 0xef, + 0x9e, 0x3b, 0x01, 0xf4, 0x3c, 0x0c, 0x4b, 0x38, 0x73, 0xf7, 0xf0, 0x57, 0x5c, 0x39, 0xb7, 0xa7, + 0xb5, 0x36, 0x6c, 0x60, 0xd2, 0x9e, 0x09, 0x69, 0xb6, 0x1a, 0x5e, 0x42, 0x34, 0x47, 0x91, 0xea, + 0xb9, 0xae, 0xb5, 0x61, 0x03, 0x13, 0x3d, 0x05, 0x7d, 0x41, 0x58, 0x25, 0x8b, 0x55, 0xe1, 0x20, + 0x1e, 0x15, 0x7d, 0xfa, 0xae, 0x31, 0x28, 0x16, 0xad, 0xe8, 0xc9, 0xd4, 0x1b, 0x57, 0x64, 0xaf, + 0xd0, 0x50, 0x9e, 0x27, 0x0e, 0xfd, 0x7d, 0x07, 0x06, 0x69, 0x8f, 0xf5, 0xdd, 0x16, 0xa1, 0x67, + 0x1b, 0x7d, 0x22, 0xd5, 0xe3, 0x79, 0x22, 0xd7, 
0x24, 0x1b, 0xd3, 0xd5, 0x31, 0xa8, 0xe0, 0x9f, + 0x7c, 0x6b, 0x62, 0x40, 0xfe, 0xc0, 0xe9, 0xa8, 0xc6, 0x17, 0xe0, 0xe1, 0xae, 0x4f, 0xf3, 0x48, + 0xa1, 0x80, 0xbf, 0x02, 0xa3, 0xe6, 0x20, 0x8e, 0x14, 0x07, 0xf8, 0xe7, 0xda, 0x6b, 0xc7, 0xe7, + 0x25, 0xe4, 0xd9, 0x03, 0xd3, 0x66, 0xd5, 0x66, 0x98, 0x13, 0x5b, 0xcf, 0xdc, 0x0c, 0x73, 0x62, + 0x33, 0xcc, 0xb9, 0xbf, 0xe7, 0xa4, 0xaf, 0xa6, 0xa6, 0xe6, 0xd1, 0x83, 0xb9, 0x1d, 0x35, 0x84, + 0x20, 0x56, 0x07, 0xf3, 0x75, 0xbc, 0x8c, 0x29, 0x1c, 0x7d, 0x49, 0x93, 0x8e, 0xb4, 0x5b, 0x5b, + 0x84, 0x35, 0x2c, 0xb9, 0xe8, 0x0d, 0xc2, 0x9d, 0xf2, 0x4f, 0x34, 0xe0, 0xec, 0x10, 0xdc, 0x1f, + 0x3b, 0xf0, 0xd8, 0x81, 0x4a, 0x6b, 0xee, 0xc0, 0x9d, 0x07, 0x3e, 0x70, 0x7a, 0xac, 0x45, 0xa4, + 0x15, 0x5e, 0xc7, 0xcb, 0xe2, 0x79, 0xa9, 0x63, 0x0d, 0x73, 0x30, 0x96, 0xed, 0xee, 0x0f, 0x1d, + 0xc8, 0xd2, 0x43, 0x1e, 0x8c, 0xb6, 0x63, 0x12, 0xd1, 0x13, 0xb2, 0x4c, 0x2a, 0x11, 0x91, 0xbb, + 0xed, 0xc9, 0x49, 0x1e, 0xbc, 0xa7, 0x03, 0x9e, 0xac, 0x84, 0x11, 0x99, 0xdc, 0x7e, 0x76, 0x92, + 0x63, 0x2c, 0x91, 0xdd, 0x32, 0x69, 0x10, 0x4a, 0x63, 0x06, 0xed, 0xef, 0x4d, 0x8c, 0x5e, 0x37, + 0x08, 0xe0, 0x0c, 0x41, 0xca, 0xa2, 0xe5, 0xc5, 0xf1, 0x4e, 0x18, 0x55, 0x05, 0x8b, 0xc2, 0x91, + 0x59, 0xac, 0x19, 0x04, 0x70, 0x86, 0xa0, 0xfb, 0x26, 0xb5, 0x06, 0x75, 0x25, 0x14, 0x7d, 0x83, + 0xaa, 0x32, 0x14, 0x32, 0xd3, 0x08, 0x37, 0x66, 0xc3, 0x20, 0xf1, 0xfc, 0x80, 0xc8, 0xd8, 0xff, + 0xba, 0x25, 0x95, 0xd7, 0xa0, 0x9d, 0xba, 0xe4, 0x3b, 0xdb, 0x70, 0xce, 0x58, 0xa8, 0xca, 0xb2, + 0xd1, 0x08, 0x37, 0xb2, 0x41, 0x3d, 0x8a, 0x84, 0x59, 0x8b, 0xfb, 0x33, 0x07, 0x2e, 0x74, 0xd1, + 0xad, 0xd1, 0x57, 0x1c, 0x18, 0xd9, 0x78, 0x5b, 0xcc, 0xcd, 0x1c, 0x06, 0x7a, 0x1f, 0x8c, 0x52, + 0x00, 0x3d, 0x58, 0xe6, 0xc3, 0xa8, 0xe9, 0x25, 0x62, 0x82, 0x2a, 0xe0, 0x34, 0x63, 0xb4, 0xe2, + 0x0c, 0xb6, 0xfb, 0xeb, 0x05, 0xc8, 0xe1, 0x82, 0x9e, 0x81, 0x01, 0x12, 0x54, 0x5b, 0xa1, 0x1f, + 0x24, 0x42, 0xb6, 0x28, 0x21, 0x76, 0x45, 0xc0, 0xb1, 0xc2, 0x10, 0xe6, 0x84, 0x58, 
0x98, 0x42, + 0x87, 0x39, 0x21, 0x46, 0x9e, 0xe2, 0xa0, 0x1a, 0x8c, 0x79, 0x3c, 0x5c, 0xc2, 0xf6, 0x1e, 0xdb, + 0xa6, 0x3d, 0x47, 0xd9, 0xa6, 0x67, 0x59, 0x34, 0x33, 0x43, 0x02, 0x77, 0x10, 0x45, 0xef, 0x86, + 0xa1, 0x76, 0x4c, 0xca, 0x73, 0x4b, 0xb3, 0x11, 0xa9, 0x72, 0x23, 0x57, 0x0b, 0xe3, 0x5d, 0x4f, + 0x9b, 0xb0, 0x8e, 0xe7, 0xfe, 0x4b, 0x07, 0xfa, 0x67, 0xbc, 0xca, 0x56, 0xb8, 0xb9, 0x49, 0x97, + 0xa2, 0xda, 0x8e, 0x52, 0x3f, 0x95, 0xb6, 0x14, 0x73, 0x02, 0x8e, 0x15, 0x06, 0x5a, 0x87, 0x3e, + 0xfe, 0xc2, 0x8b, 0xd7, 0xee, 0x17, 0xb5, 0xf9, 0xa8, 0xb4, 0x1c, 0xb6, 0x1d, 0xda, 0x89, 0xdf, + 0x98, 0xe4, 0x69, 0x39, 0x93, 0x8b, 0x41, 0xb2, 0x1a, 0x95, 0x93, 0xc8, 0x0f, 0x6a, 0x33, 0x40, + 0xa5, 0xff, 0x3c, 0xa3, 0x81, 0x05, 0x2d, 0x3a, 0x8d, 0xa6, 0x77, 0x4b, 0xb2, 0x13, 0xba, 0x86, + 0x9a, 0xc6, 0x4a, 0xda, 0x84, 0x75, 0x3c, 0xf7, 0xf7, 0x1d, 0x18, 0x9c, 0xf1, 0x62, 0xbf, 0xf2, + 0xe7, 0x48, 0xf8, 0x7c, 0x08, 0x8a, 0xb3, 0x5e, 0xa5, 0x4e, 0xd0, 0xf5, 0xac, 0x0d, 0x3b, 0x74, + 0xf9, 0xe9, 0x3c, 0x36, 0xca, 0x9e, 0xd5, 0x39, 0x8d, 0x74, 0xb3, 0x74, 0xdd, 0xb7, 0x1c, 0x18, + 0x9d, 0x6d, 0xf8, 0x24, 0x48, 0x66, 0x49, 0x94, 0xb0, 0x85, 0xab, 0xc1, 0x58, 0x45, 0x41, 0xee, + 0x65, 0xe9, 0xd8, 0x6e, 0x9d, 0xcd, 0x90, 0xc0, 0x1d, 0x44, 0x51, 0x15, 0x4e, 0x71, 0x58, 0xfa, + 0x56, 0x1c, 0x69, 0xfd, 0x98, 0xb3, 0x73, 0xd6, 0xa4, 0x80, 0xb3, 0x24, 0xdd, 0x9f, 0x3a, 0x70, + 0x61, 0xb6, 0xd1, 0x8e, 0x13, 0x12, 0xdd, 0x14, 0xd2, 0x48, 0x6a, 0xab, 0xe8, 0x23, 0x30, 0xd0, + 0x94, 0x01, 0x58, 0xe7, 0x2e, 0x1b, 0x98, 0xc9, 0x33, 0x8a, 0x4d, 0x07, 0xb3, 0xba, 0xf1, 0x2a, + 0xa9, 0x24, 0x2b, 0x24, 0xf1, 0xd2, 0x6c, 0x81, 0x14, 0x86, 0x15, 0x55, 0xd4, 0x82, 0xde, 0xb8, + 0x45, 0x2a, 0xf6, 0x92, 0xb5, 0xe4, 0x1c, 0xca, 0x2d, 0x52, 0x49, 0xe5, 0x3a, 0x0b, 0x1d, 0x32, + 0x4e, 0xee, 0xff, 0x72, 0xe0, 0x91, 0x2e, 0xf3, 0x5d, 0xf6, 0xe3, 0x04, 0xbd, 0xd2, 0x31, 0xe7, + 0xc9, 0xc3, 0xcd, 0x99, 0xf6, 0x66, 0x33, 0x56, 0x02, 0x41, 0x42, 0xb4, 0xf9, 0x7e, 0x0c, 0x8a, + 0x7e, 0x42, 0x9a, 0xd2, 
0xab, 0x6c, 0xc1, 0xff, 0xd3, 0x65, 0x2e, 0x33, 0x23, 0x32, 0x65, 0x6f, + 0x91, 0xf2, 0xc3, 0x9c, 0xad, 0xfb, 0xaf, 0x1c, 0xa0, 0x1b, 0xbd, 0xea, 0x8b, 0x58, 0x5d, 0x6f, + 0xb2, 0xdb, 0x92, 0x86, 0xbb, 0x54, 0xe0, 0x7b, 0xa9, 0x3e, 0x7d, 0x67, 0x6f, 0x62, 0x44, 0x21, + 0x32, 0x05, 0x9e, 0xa1, 0xa2, 0x0f, 0x41, 0x5f, 0xcc, 0x8c, 0x5e, 0x21, 0xd9, 0xe7, 0xa5, 0x86, + 0xca, 0x4d, 0xe1, 0x3b, 0x7b, 0x13, 0x87, 0x4a, 0x8c, 0x9c, 0x54, 0xb4, 0x45, 0x58, 0x51, 0x50, + 0xa5, 0x2a, 0x55, 0x93, 0xc4, 0xb1, 0x57, 0x93, 0x36, 0x94, 0x52, 0xa9, 0x56, 0x38, 0x18, 0xcb, + 0x76, 0xf7, 0xcb, 0x0e, 0x8c, 0xa8, 0xf3, 0x84, 0x2a, 0xc8, 0xe8, 0x9a, 0x7e, 0xf2, 0xf0, 0x87, + 0xf7, 0x58, 0x17, 0x21, 0x20, 0xce, 0xd6, 0x83, 0x0f, 0xa6, 0x77, 0xc1, 0x70, 0x95, 0xb4, 0x48, + 0x50, 0x25, 0x41, 0x85, 0x1a, 0xb8, 0xf4, 0xa1, 0x0d, 0xce, 0x8c, 0x51, 0x8b, 0x6e, 0x4e, 0x83, + 0x63, 0x03, 0xcb, 0xfd, 0xa6, 0x03, 0x0f, 0x2b, 0x72, 0x65, 0x92, 0x60, 0x92, 0x44, 0xbb, 0x2a, + 0x11, 0xf2, 0x68, 0x07, 0xc8, 0x4d, 0xaa, 0x61, 0x26, 0x11, 0x67, 0x7e, 0x6f, 0x27, 0xc8, 0x10, + 0xd7, 0x47, 0x19, 0x11, 0x2c, 0xa9, 0xb9, 0xbf, 0xda, 0x03, 0x67, 0xf5, 0x41, 0xaa, 0x77, 0xfe, + 0x97, 0x1d, 0x00, 0xb5, 0x02, 0xf4, 0x8c, 0xec, 0xb1, 0x13, 0x1d, 0x32, 0x9e, 0x54, 0x2a, 0x15, + 0x14, 0x38, 0xc6, 0x1a, 0x5b, 0xf4, 0x12, 0x0c, 0x6f, 0x87, 0x8d, 0x76, 0x93, 0xac, 0xd0, 0x13, + 0x3c, 0x2e, 0xf5, 0xb0, 0x61, 0x4c, 0xe4, 0x3d, 0xcc, 0x1b, 0x29, 0x5e, 0x6a, 0x70, 0x6b, 0xc0, + 0x18, 0x1b, 0xa4, 0xa8, 0x2d, 0x31, 0x12, 0xe9, 0x8f, 0x44, 0x78, 0x9d, 0x3f, 0x68, 0x71, 0x8e, + 0xd9, 0xa7, 0x3e, 0x73, 0x7a, 0x7f, 0x6f, 0x62, 0xc4, 0x00, 0x61, 0x73, 0x10, 0xee, 0x4b, 0xc0, + 0xd6, 0xc2, 0x0f, 0xda, 0x64, 0x35, 0x40, 0x4f, 0x48, 0x2f, 0x18, 0x8f, 0x5c, 0xa8, 0x97, 0x59, + 0xf7, 0x84, 0x51, 0x6b, 0x71, 0xd3, 0xf3, 0x1b, 0x2c, 0x41, 0x90, 0x62, 0x29, 0x6b, 0x71, 0x9e, + 0x41, 0xb1, 0x68, 0x75, 0x27, 0xa1, 0x7f, 0x96, 0xce, 0x9d, 0x44, 0x94, 0xae, 0x9e, 0xd7, 0x3b, + 0x62, 0xe4, 0xf5, 0xca, 0xfc, 0xdd, 0x75, 0x38, 0x37, 0x1b, 
0x11, 0x2f, 0x21, 0xe5, 0xe7, 0x66, + 0xda, 0x95, 0x2d, 0x92, 0xf0, 0xe4, 0xa9, 0x18, 0xbd, 0x17, 0x46, 0x42, 0x26, 0xc5, 0x97, 0xc3, + 0xca, 0x96, 0x1f, 0xd4, 0x84, 0x53, 0xf3, 0x9c, 0xa0, 0x32, 0xb2, 0xaa, 0x37, 0x62, 0x13, 0xd7, + 0xfd, 0x8f, 0x05, 0x18, 0x9e, 0x8d, 0xc2, 0x40, 0x4a, 0xaa, 0x13, 0x38, 0x5d, 0x12, 0xe3, 0x74, + 0xb1, 0x10, 0x50, 0xd4, 0xc7, 0xdf, 0xed, 0x84, 0x41, 0xb7, 0x95, 0x88, 0xec, 0xb1, 0x65, 0x15, + 0x18, 0x7c, 0x19, 0xed, 0xf4, 0x61, 0x9b, 0x02, 0xd4, 0xfd, 0x4f, 0x0e, 0x8c, 0xe9, 0xe8, 0x27, + 0x70, 0xa8, 0xc5, 0xe6, 0xa1, 0x76, 0xcd, 0xee, 0x7c, 0xbb, 0x9c, 0x64, 0x9f, 0xef, 0x33, 0xe7, + 0xc9, 0xa2, 0xc9, 0x5f, 0x75, 0x60, 0x78, 0x47, 0x03, 0x88, 0xc9, 0xda, 0xd6, 0x2b, 0xde, 0x21, + 0xc5, 0x8c, 0x0e, 0xbd, 0x93, 0xf9, 0x8d, 0x8d, 0x91, 0x50, 0xb9, 0x1f, 0x57, 0xea, 0xa4, 0xda, + 0x6e, 0x48, 0xbf, 0xa2, 0x5a, 0xd2, 0xb2, 0x80, 0x63, 0x85, 0x81, 0x5e, 0x81, 0xd3, 0x95, 0x30, + 0xa8, 0xb4, 0xa3, 0x88, 0x04, 0x95, 0xdd, 0x35, 0x76, 0x15, 0x41, 0x1c, 0x88, 0x93, 0xa2, 0xdb, + 0xe9, 0xd9, 0x2c, 0xc2, 0x9d, 0x3c, 0x20, 0xee, 0x24, 0xc4, 0xdd, 0xf1, 0x31, 0x3d, 0xb2, 0x84, + 0x0d, 0xa4, 0xb9, 0xe3, 0x19, 0x18, 0xcb, 0x76, 0x74, 0x1d, 0x2e, 0xc4, 0x89, 0x17, 0x25, 0x7e, + 0x50, 0x9b, 0x23, 0x5e, 0xb5, 0xe1, 0x07, 0x54, 0xbb, 0x0f, 0x83, 0x2a, 0x0f, 0xd6, 0xf5, 0xcc, + 0x3c, 0xb2, 0xbf, 0x37, 0x71, 0xa1, 0x9c, 0x8f, 0x82, 0xbb, 0xf5, 0x45, 0x1f, 0x82, 0x71, 0xe1, + 0xf0, 0xdf, 0x6c, 0x37, 0x5e, 0x08, 0x37, 0xe2, 0xab, 0x7e, 0x4c, 0x4d, 0xeb, 0x65, 0xbf, 0xe9, + 0x27, 0x2c, 0x24, 0x57, 0x9c, 0xb9, 0xb8, 0xbf, 0x37, 0x31, 0x5e, 0xee, 0x8a, 0x85, 0x0f, 0xa0, + 0x80, 0x30, 0x9c, 0xe7, 0xc2, 0xaf, 0x83, 0x76, 0x3f, 0xa3, 0x3d, 0xbe, 0xbf, 0x37, 0x71, 0x7e, + 0x3e, 0x17, 0x03, 0x77, 0xe9, 0x49, 0x9f, 0x60, 0xe2, 0x37, 0xc9, 0x6b, 0x61, 0x40, 0x58, 0x2a, + 0x88, 0xf6, 0x04, 0xd7, 0x05, 0x1c, 0x2b, 0x0c, 0xf4, 0x6a, 0xba, 0x13, 0xe9, 0xeb, 0x22, 0x52, + 0x3a, 0x8e, 0x2e, 0xe1, 0x98, 0xb5, 0x70, 0x53, 0xa3, 0xc4, 0x72, 0x15, 0x0d, 0xda, 0xee, 0x1f, + 
0x14, 0x00, 0x75, 0x8a, 0x08, 0xb4, 0x04, 0x7d, 0x5e, 0x25, 0xf1, 0xb7, 0x65, 0xee, 0xdb, 0x13, + 0x79, 0xc7, 0x27, 0x67, 0x85, 0xc9, 0x26, 0xa1, 0x3b, 0x84, 0xa4, 0x72, 0x65, 0x9a, 0x75, 0xc5, + 0x82, 0x04, 0x0a, 0xe1, 0x74, 0xc3, 0x8b, 0x13, 0xb9, 0x57, 0xab, 0x74, 0xca, 0x42, 0xb0, 0xfe, + 0xfc, 0xe1, 0x26, 0x45, 0x7b, 0xcc, 0x9c, 0xa3, 0x3b, 0x77, 0x39, 0x4b, 0x08, 0x77, 0xd2, 0x46, + 0x1f, 0x67, 0x7a, 0x08, 0x57, 0x12, 0xa5, 0x02, 0xb0, 0x64, 0xe5, 0x8c, 0xe6, 0x34, 0x0d, 0x1d, + 0x44, 0xb0, 0xc1, 0x1a, 0x4b, 0xf7, 0x5f, 0x03, 0xf4, 0xcf, 0x4d, 0x2f, 0xac, 0x7b, 0xf1, 0xd6, + 0x21, 0x42, 0x5c, 0x74, 0x77, 0x08, 0x1d, 0x2a, 0xfb, 0x7e, 0x4b, 0xdd, 0x0a, 0x2b, 0x0c, 0x14, + 0x40, 0x9f, 0x1f, 0xd0, 0x17, 0xa2, 0x34, 0x6a, 0xcb, 0xc1, 0xac, 0x34, 0x7f, 0xe6, 0x32, 0x58, + 0x64, 0xd4, 0xb1, 0xe0, 0x82, 0x6e, 0xc3, 0xa0, 0x27, 0xef, 0x8e, 0x88, 0x63, 0x69, 0xc9, 0x86, + 0xe7, 0x54, 0x90, 0xd4, 0x73, 0x57, 0x04, 0x08, 0xa7, 0x0c, 0xd1, 0x27, 0x1c, 0x18, 0x92, 0x53, + 0xc7, 0x64, 0x53, 0x04, 0x35, 0x57, 0xec, 0xcd, 0x19, 0x93, 0x4d, 0x9e, 0xd8, 0xa0, 0x01, 0xb0, + 0xce, 0xb2, 0x43, 0x95, 0x2f, 0x1e, 0x46, 0x95, 0x47, 0x3b, 0x30, 0xb8, 0xe3, 0x27, 0x75, 0x76, + 0xf0, 0x88, 0x60, 0xca, 0xfc, 0xfd, 0x8f, 0x9a, 0x92, 0x4b, 0x57, 0xec, 0xa6, 0x64, 0x80, 0x53, + 0x5e, 0x68, 0x8a, 0x33, 0x66, 0x77, 0x6f, 0x98, 0xc8, 0x1a, 0x34, 0x3b, 0xb0, 0x06, 0x9c, 0xe2, + 0xd0, 0x25, 0x1e, 0xa6, 0xbf, 0xca, 0xe4, 0xa3, 0x6d, 0xfa, 0x1e, 0x8b, 0x64, 0x35, 0x0b, 0xfb, + 0x4a, 0x52, 0xe4, 0x8b, 0x75, 0x53, 0xe3, 0x81, 0x0d, 0x8e, 0xf4, 0x1d, 0xd9, 0xa9, 0x93, 0x40, + 0x24, 0xd3, 0xab, 0x77, 0xe4, 0x66, 0x9d, 0x04, 0x98, 0xb5, 0xa0, 0xdb, 0xdc, 0xb4, 0xe0, 0x3a, + 0xae, 0x48, 0x3c, 0x5b, 0xb6, 0xa3, 0x76, 0x73, 0x9a, 0x3c, 0x9f, 0x3d, 0xfd, 0x8d, 0x35, 0x7e, + 0x54, 0x5d, 0x0e, 0x83, 0x2b, 0xb7, 0xfc, 0x44, 0x64, 0xe1, 0x2b, 0x49, 0xb7, 0xca, 0xa0, 0x58, + 0xb4, 0xf2, 0xa0, 0x3d, 0xdd, 0x04, 0x31, 0x4b, 0xb9, 0x1f, 0xd4, 0x83, 0xf6, 0x0c, 0x8c, 0x65, + 0x3b, 0xfa, 0x7b, 0x0e, 0x14, 0xeb, 
0x61, 0xb8, 0x15, 0x97, 0x46, 0xd8, 0xe6, 0xb0, 0xa0, 0xea, + 0x09, 0x89, 0x33, 0x79, 0x95, 0x92, 0x35, 0xef, 0x15, 0x15, 0x19, 0xec, 0xce, 0xde, 0xc4, 0xe8, + 0xb2, 0xbf, 0x49, 0x2a, 0xbb, 0x95, 0x06, 0x61, 0x90, 0x4f, 0xbe, 0xa5, 0x41, 0xae, 0x6c, 0x93, + 0x20, 0xc1, 0x7c, 0x54, 0xe3, 0x9f, 0x77, 0x00, 0x52, 0x42, 0x39, 0xd1, 0x31, 0x62, 0xc6, 0x93, + 0x2d, 0xd8, 0x79, 0xc6, 0xd0, 0xf4, 0x70, 0xdb, 0xbf, 0x71, 0x60, 0x88, 0x4e, 0x4e, 0x8a, 0xc0, + 0xa7, 0xa0, 0x2f, 0xf1, 0xa2, 0x1a, 0x91, 0x2e, 0x65, 0xf5, 0x38, 0xd6, 0x19, 0x14, 0x8b, 0x56, + 0x14, 0x40, 0x31, 0xf1, 0xe2, 0x2d, 0xa9, 0x5d, 0x2e, 0x5a, 0x5b, 0xe2, 0x54, 0xb1, 0xa4, 0xbf, + 0x62, 0xcc, 0xd9, 0xa0, 0xa7, 0x61, 0x80, 0x2a, 0x00, 0xf3, 0x5e, 0x2c, 0x93, 0x36, 0x86, 0xa9, + 0x10, 0x9f, 0x17, 0x30, 0xac, 0x5a, 0xdd, 0x5f, 0x2f, 0x40, 0xef, 0x1c, 0xb7, 0x33, 0xfa, 0xe2, + 0xb0, 0x1d, 0x55, 0x88, 0xd0, 0x37, 0x2d, 0xec, 0x69, 0x4a, 0xb7, 0xcc, 0x68, 0x6a, 0x9a, 0x3e, + 0xfb, 0x8d, 0x05, 0x2f, 0x6a, 0xc8, 0x8e, 0x26, 0x91, 0x17, 0xc4, 0x9b, 0xcc, 0x79, 0xef, 0x87, + 0x81, 0x58, 0x22, 0x0b, 0xbb, 0x70, 0xdd, 0xa0, 0x5b, 0x4e, 0x48, 0x2b, 0x8d, 0x21, 0x98, 0x6d, + 0x38, 0x33, 0x06, 0xf7, 0x37, 0x1c, 0x80, 0x74, 0xf4, 0xe8, 0x73, 0x0e, 0x8c, 0x78, 0x7a, 0xb2, + 0xa0, 0x58, 0xa3, 0x55, 0x7b, 0x81, 0x3b, 0x46, 0x96, 0x9b, 0xd8, 0x06, 0x08, 0x9b, 0x8c, 0xdd, + 0x77, 0x43, 0x91, 0xbd, 0x1d, 0x4c, 0x17, 0x17, 0x5e, 0xd2, 0xac, 0x0f, 0x46, 0x7a, 0x4f, 0xb1, + 0xc2, 0x70, 0x5f, 0x81, 0xd1, 0x2b, 0xb7, 0x48, 0xa5, 0x9d, 0x84, 0x11, 0xf7, 0x11, 0x77, 0xb9, + 0x1c, 0xe2, 0xdc, 0xd3, 0xe5, 0x90, 0xef, 0x3a, 0x30, 0xa4, 0x65, 0x8e, 0xd1, 0x93, 0xba, 0x36, + 0x5b, 0xe6, 0x76, 0xb7, 0x58, 0xaa, 0x25, 0x2b, 0xb9, 0x69, 0x9c, 0x64, 0x7a, 0x8c, 0x28, 0x10, + 0x4e, 0x19, 0xde, 0x25, 0xb3, 0xcb, 0xfd, 0x5d, 0x07, 0xce, 0xe5, 0xa6, 0xb9, 0x3d, 0xe0, 0x61, + 0x4f, 0xc1, 0xe0, 0x16, 0xd9, 0x35, 0x42, 0x5e, 0xaa, 0xc3, 0x92, 0x6c, 0xc0, 0x29, 0x8e, 0xfb, + 0x3d, 0x07, 0x52, 0x4a, 0x54, 0x14, 0x6d, 0xa4, 0x23, 0xd7, 0x44, 0x91, 
0xe0, 0x24, 0x5a, 0xd1, + 0x6d, 0xb8, 0x60, 0x3e, 0xc1, 0x7b, 0xf4, 0xcc, 0x73, 0x9b, 0x29, 0x9f, 0x12, 0xee, 0xc6, 0xc2, + 0xbd, 0x01, 0xc5, 0x05, 0xaf, 0x5d, 0x23, 0x87, 0x72, 0xe2, 0x50, 0x31, 0x16, 0x11, 0xaf, 0x91, + 0x48, 0x35, 0x5d, 0x88, 0x31, 0x2c, 0x60, 0x58, 0xb5, 0xba, 0x3f, 0x2c, 0xc2, 0x90, 0x76, 0x99, + 0x81, 0x9e, 0xe3, 0x11, 0x69, 0x85, 0x59, 0x5d, 0x97, 0x3e, 0x6c, 0xcc, 0x5a, 0xe8, 0xfb, 0x13, + 0x91, 0x6d, 0x3f, 0xe6, 0x22, 0xc7, 0x78, 0x7f, 0xb0, 0x80, 0x63, 0x85, 0x81, 0x26, 0xa0, 0x58, + 0x25, 0xad, 0xa4, 0xce, 0xa4, 0x69, 0x2f, 0xcf, 0xe8, 0x9a, 0xa3, 0x00, 0xcc, 0xe1, 0x14, 0x61, + 0x93, 0x24, 0x95, 0x3a, 0x73, 0x36, 0x8a, 0x94, 0xaf, 0x79, 0x0a, 0xc0, 0x1c, 0x9e, 0x13, 0xab, + 0x2a, 0x1e, 0x7f, 0xac, 0xaa, 0xcf, 0x72, 0xac, 0x0a, 0xb5, 0xe0, 0x4c, 0x1c, 0xd7, 0xd7, 0x22, + 0x7f, 0xdb, 0x4b, 0x48, 0xba, 0x73, 0xfa, 0x8f, 0xc2, 0xe7, 0x02, 0xbb, 0x5e, 0x5c, 0xbe, 0x9a, + 0xa5, 0x82, 0xf3, 0x48, 0xa3, 0x32, 0x9c, 0xf3, 0x83, 0x98, 0x54, 0xda, 0x11, 0x59, 0xac, 0x05, + 0x61, 0x44, 0xae, 0x86, 0x31, 0x25, 0x27, 0x2e, 0x47, 0xaa, 0x24, 0xc8, 0xc5, 0x3c, 0x24, 0x9c, + 0xdf, 0x17, 0x2d, 0xc0, 0xe9, 0xaa, 0x1f, 0x7b, 0x1b, 0x0d, 0x52, 0x6e, 0x6f, 0x34, 0x43, 0x6a, + 0xb0, 0xf1, 0x0b, 0x0b, 0x03, 0x33, 0x0f, 0x4b, 0xd7, 0xc4, 0x5c, 0x16, 0x01, 0x77, 0xf6, 0x41, + 0xcf, 0xc3, 0x70, 0xec, 0x07, 0xb5, 0x06, 0x99, 0x89, 0xbc, 0xa0, 0x52, 0x17, 0xb7, 0x2a, 0x95, + 0x0b, 0xb7, 0xac, 0xb5, 0x61, 0x03, 0x93, 0xbd, 0xaf, 0xbc, 0x4f, 0x46, 0x93, 0x13, 0xd8, 0xa2, + 0xd5, 0xfd, 0x91, 0x03, 0xc3, 0x7a, 0x02, 0x32, 0xd5, 0x92, 0xa1, 0x3e, 0x37, 0x5f, 0xe6, 0x72, + 0xdc, 0xde, 0x69, 0x7d, 0x55, 0xd1, 0x4c, 0xad, 0xca, 0x14, 0x86, 0x35, 0x9e, 0x87, 0xb8, 0x4e, + 0xfc, 0x04, 0x14, 0x37, 0x43, 0xaa, 0x4c, 0xf4, 0x98, 0xbe, 0xdf, 0x79, 0x0a, 0xc4, 0xbc, 0xcd, + 0xfd, 0x1f, 0x0e, 0x9c, 0xcf, 0xcf, 0xad, 0x7e, 0x3b, 0x4c, 0xf2, 0x32, 0x00, 0x9d, 0x8a, 0x21, + 0x90, 0xb5, 0x82, 0x02, 0xb2, 0x05, 0x6b, 0x58, 0x87, 0x9b, 0xf6, 0x9f, 0x50, 0x85, 0x36, 0xe5, + 0xf3, 0x05, 
0x07, 0x46, 0x28, 0xdb, 0xa5, 0x68, 0xc3, 0x98, 0xed, 0xaa, 0x9d, 0xd9, 0x2a, 0xb2, + 0xa9, 0x8b, 0xdb, 0x00, 0x63, 0x93, 0x39, 0xfa, 0x05, 0x18, 0xf4, 0xaa, 0xd5, 0x88, 0xc4, 0xb1, + 0x0a, 0x16, 0xb1, 0xd0, 0xf2, 0xb4, 0x04, 0xe2, 0xb4, 0x9d, 0x0a, 0xd1, 0x7a, 0x75, 0x33, 0xa6, + 0x72, 0x49, 0x78, 0xf6, 0x94, 0x10, 0xa5, 0x4c, 0x28, 0x1c, 0x2b, 0x0c, 0xf7, 0x6f, 0xf6, 0x82, + 0xc9, 0x1b, 0x55, 0xe1, 0xd4, 0x56, 0xb4, 0x31, 0xcb, 0xc2, 0xdf, 0xf7, 0x12, 0x86, 0x66, 0xe1, + 0xe1, 0x25, 0x93, 0x02, 0xce, 0x92, 0x14, 0x5c, 0x96, 0xc8, 0x6e, 0xe2, 0x6d, 0xdc, 0x73, 0x10, + 0x7a, 0xc9, 0xa4, 0x80, 0xb3, 0x24, 0xd1, 0xbb, 0x61, 0x68, 0x2b, 0xda, 0x90, 0x22, 0x3a, 0x9b, + 0xd1, 0xb0, 0x94, 0x36, 0x61, 0x1d, 0x8f, 0x2e, 0xe1, 0x56, 0xb4, 0x41, 0x8f, 0x34, 0x79, 0xbd, + 0x5e, 0x2d, 0xe1, 0x92, 0x80, 0x63, 0x85, 0x81, 0x5a, 0x80, 0xb6, 0xe4, 0xea, 0xa9, 0x60, 0xbf, + 0x38, 0x49, 0x0e, 0x9f, 0x2b, 0xc0, 0x92, 0xa6, 0x97, 0x3a, 0xe8, 0xe0, 0x1c, 0xda, 0xe8, 0x25, + 0xb8, 0xb0, 0x15, 0x6d, 0x88, 0x83, 0x7e, 0x2d, 0xf2, 0x83, 0x8a, 0xdf, 0x32, 0xae, 0xd2, 0x4f, + 0x88, 0xe1, 0x5e, 0x58, 0xca, 0x47, 0xc3, 0xdd, 0xfa, 0xbb, 0xbf, 0xd5, 0x0b, 0xec, 0x12, 0x20, + 0x95, 0x85, 0x4d, 0x92, 0xd4, 0xc3, 0x6a, 0x56, 0x77, 0x59, 0x61, 0x50, 0x2c, 0x5a, 0x65, 0x6a, + 0x60, 0xa1, 0x4b, 0x6a, 0xe0, 0x0e, 0xf4, 0xd7, 0x89, 0x57, 0x25, 0x91, 0x74, 0xb5, 0x2d, 0xdb, + 0xb9, 0xb6, 0x78, 0x95, 0x11, 0x4d, 0x4d, 0x68, 0xfe, 0x3b, 0xc6, 0x92, 0x1b, 0x7a, 0x0f, 0x8c, + 0x52, 0x2d, 0x24, 0x6c, 0x27, 0xd2, 0xaf, 0xdc, 0xcb, 0xfc, 0xca, 0xec, 0x44, 0x5d, 0x37, 0x5a, + 0x70, 0x06, 0x13, 0xcd, 0xc1, 0x98, 0xf0, 0x01, 0x2b, 0x17, 0x9e, 0x58, 0x58, 0x55, 0xe3, 0xa0, + 0x9c, 0x69, 0xc7, 0x1d, 0x3d, 0x58, 0x2e, 0x58, 0x58, 0xe5, 0x61, 0x40, 0x3d, 0x17, 0x2c, 0xac, + 0xee, 0x62, 0xd6, 0x82, 0x5e, 0x83, 0x01, 0xfa, 0x77, 0x3e, 0x0a, 0x9b, 0xc2, 0xaf, 0xb2, 0x66, + 0x67, 0x75, 0x28, 0x0f, 0x61, 0xe5, 0x31, 0xed, 0x6c, 0x46, 0x70, 0xc1, 0x8a, 0x1f, 0xb5, 0x35, + 0xe4, 0x39, 0x5c, 0xde, 0xf2, 0x5b, 0x37, 0x48, 
0xe4, 0x6f, 0xee, 0x32, 0xa5, 0x61, 0x20, 0xb5, + 0x35, 0x16, 0x3b, 0x30, 0x70, 0x4e, 0x2f, 0xf7, 0x0b, 0x05, 0x18, 0xd6, 0xef, 0x92, 0xde, 0x2d, + 0x5f, 0x34, 0x4e, 0x37, 0x05, 0xb7, 0x2c, 0xaf, 0x5a, 0x98, 0xf6, 0xdd, 0x36, 0x44, 0x1d, 0x7a, + 0xbd, 0xb6, 0xd0, 0x16, 0xad, 0x38, 0xb0, 0xd8, 0x8c, 0xdb, 0x49, 0x9d, 0x5f, 0x3a, 0x62, 0x99, + 0x9c, 0x8c, 0x83, 0xfb, 0xe9, 0x1e, 0x18, 0x90, 0x8d, 0xe8, 0x53, 0x0e, 0x40, 0x9a, 0x82, 0x23, + 0x44, 0xe9, 0x9a, 0x8d, 0xfc, 0x0c, 0x3d, 0x7b, 0x48, 0x73, 0x3a, 0x2b, 0x38, 0xd6, 0xf8, 0xa2, + 0x04, 0xfa, 0x42, 0x3a, 0xb8, 0xcb, 0xf6, 0xee, 0x43, 0xaf, 0x52, 0xc6, 0x97, 0x19, 0xf7, 0xd4, + 0xe5, 0xc5, 0x60, 0x58, 0xf0, 0xa2, 0xd6, 0xdb, 0x86, 0xcc, 0x0c, 0xb3, 0xe7, 0x1e, 0x56, 0xc9, + 0x66, 0xa9, 0x31, 0xa6, 0x40, 0x38, 0x65, 0xe8, 0x3e, 0x0b, 0xa3, 0xe6, 0xcb, 0x40, 0x2d, 0x82, + 0x8d, 0xdd, 0x84, 0x70, 0x5f, 0xc1, 0x30, 0xb7, 0x08, 0x66, 0x28, 0x00, 0x73, 0xb8, 0xfb, 0x26, + 0xd5, 0x03, 0x94, 0x78, 0x39, 0x84, 0x7b, 0xfe, 0x09, 0xdd, 0xd1, 0xd5, 0xcd, 0x66, 0xfa, 0x38, + 0x0c, 0xb2, 0x7f, 0xd8, 0x8b, 0xde, 0x63, 0x2b, 0x68, 0x9c, 0x8e, 0x53, 0xbc, 0xea, 0x4c, 0x27, + 0xb8, 0x21, 0x19, 0xe1, 0x94, 0xa7, 0x1b, 0xc2, 0x58, 0x16, 0x1b, 0x7d, 0x10, 0x86, 0x63, 0x79, + 0xac, 0xa6, 0x37, 0xa3, 0x0e, 0x79, 0xfc, 0x32, 0x9f, 0x6d, 0x59, 0xeb, 0x8e, 0x0d, 0x62, 0xee, + 0x2a, 0xf4, 0x59, 0x5d, 0x42, 0xf7, 0xdb, 0x0e, 0x0c, 0xb2, 0xa8, 0x59, 0x2d, 0xf2, 0x9a, 0x69, + 0x97, 0x9e, 0x03, 0x56, 0x3d, 0x86, 0x7e, 0x6e, 0x5f, 0xcb, 0x6c, 0x13, 0x0b, 0x52, 0x86, 0x97, + 0x31, 0x4b, 0xa5, 0x0c, 0x37, 0xe4, 0x63, 0x2c, 0x39, 0xb9, 0x9f, 0x29, 0x40, 0xdf, 0x62, 0xd0, + 0x6a, 0xff, 0x85, 0x2f, 0xa5, 0xb5, 0x02, 0xbd, 0x8b, 0x09, 0x69, 0x9a, 0x15, 0xdf, 0x86, 0x67, + 0x9e, 0xd4, 0xab, 0xbd, 0x95, 0xcc, 0x6a, 0x6f, 0xd8, 0xdb, 0x91, 0xc9, 0x58, 0xc2, 0xbf, 0x9b, + 0xde, 0x0e, 0x7b, 0x06, 0x06, 0x97, 0xbd, 0x0d, 0xd2, 0x58, 0x22, 0xbb, 0xec, 0x2e, 0x17, 0x4f, + 0x0c, 0x70, 0x52, 0xc3, 0xde, 0x08, 0xe2, 0xcf, 0xc1, 0x28, 0xc3, 0x56, 0x2f, 0x03, 
0xb5, 0x1c, + 0x48, 0x5a, 0x2e, 0xc7, 0x31, 0x2d, 0x07, 0xad, 0x54, 0x8e, 0x86, 0xe5, 0x4e, 0xc2, 0x50, 0x4a, + 0xe5, 0x10, 0x5c, 0x7f, 0x56, 0x80, 0x11, 0xc3, 0x4d, 0x6d, 0x04, 0xef, 0x9c, 0xbb, 0x06, 0xef, + 0x8c, 0x60, 0x5a, 0xe1, 0x41, 0x07, 0xd3, 0x7a, 0x4e, 0x3e, 0x98, 0x66, 0x3e, 0xa4, 0xde, 0x43, + 0x3d, 0xa4, 0x06, 0xf4, 0x2e, 0xfb, 0xc1, 0xd6, 0xe1, 0xe4, 0x4c, 0x5c, 0x09, 0x5b, 0x1d, 0x72, + 0xa6, 0x4c, 0x81, 0x98, 0xb7, 0x49, 0xcd, 0xa5, 0x27, 0x5f, 0x73, 0x71, 0x3f, 0xe5, 0xc0, 0xf0, + 0x8a, 0x17, 0xf8, 0x9b, 0x24, 0x4e, 0xd8, 0xbe, 0x4a, 0x8e, 0xf5, 0x4e, 0xcf, 0x70, 0x97, 0xdb, + 0xe9, 0x9f, 0x74, 0xe0, 0xf4, 0x0a, 0x69, 0x86, 0xfe, 0x6b, 0x5e, 0x9a, 0xeb, 0x48, 0xc7, 0x5e, + 0xf7, 0x13, 0x91, 0xda, 0xa5, 0xc6, 0x7e, 0xd5, 0x4f, 0x30, 0x85, 0xdf, 0xc5, 0x07, 0xcb, 0xd2, + 0xeb, 0xa9, 0x81, 0xa6, 0xdd, 0x33, 0x4b, 0xb3, 0x18, 0x65, 0x03, 0x4e, 0x71, 0xdc, 0xdf, 0x76, + 0xa0, 0x9f, 0x0f, 0x82, 0x48, 0xda, 0x4e, 0x17, 0xda, 0x75, 0x28, 0xb2, 0x7e, 0x62, 0x57, 0x2f, + 0x58, 0x50, 0x7f, 0x28, 0x39, 0xfe, 0x0e, 0xb2, 0x7f, 0x31, 0x67, 0xc0, 0xcc, 0x16, 0xef, 0xd6, + 0xb4, 0x4a, 0xf3, 0x4c, 0xcd, 0x16, 0x06, 0xc5, 0xa2, 0xd5, 0xfd, 0x7a, 0x0f, 0x0c, 0xa8, 0xa2, + 0x4c, 0xec, 0xca, 0x7c, 0x10, 0x84, 0x89, 0xc7, 0x93, 0x02, 0xb8, 0xac, 0xfe, 0xa0, 0xbd, 0xa2, + 0x50, 0x93, 0xd3, 0x29, 0x75, 0x1e, 0x7b, 0x53, 0x46, 0xa8, 0xd6, 0x82, 0xf5, 0x41, 0xa0, 0x8f, + 0x41, 0x5f, 0x83, 0x4a, 0x1f, 0x29, 0xba, 0x6f, 0x58, 0x1c, 0x0e, 0x13, 0x6b, 0x62, 0x24, 0x6a, + 0x85, 0x38, 0x10, 0x0b, 0xae, 0xe3, 0xef, 0x83, 0xb1, 0xec, 0xa8, 0xef, 0x76, 0x0d, 0x6e, 0x50, + 0xbf, 0x44, 0xf7, 0x97, 0x85, 0xf4, 0x3c, 0x7a, 0x57, 0xf7, 0x45, 0x18, 0x5a, 0x21, 0x49, 0xe4, + 0x57, 0x18, 0x81, 0xbb, 0x6d, 0xae, 0x43, 0xe9, 0x0f, 0x9f, 0x65, 0x9b, 0x95, 0xd2, 0x8c, 0xd1, + 0x6d, 0x80, 0x56, 0x14, 0x52, 0xfb, 0x95, 0xb4, 0xe5, 0xc3, 0xb6, 0xa0, 0x0f, 0xaf, 0x29, 0x9a, + 0x3c, 0x5c, 0x9c, 0xfe, 0xc6, 0x1a, 0x3f, 0xf7, 0x12, 0x14, 0x57, 0xda, 0x09, 0xb9, 0x75, 0x77, + 0x89, 0xe5, 0x7e, 0x10, 
0x86, 0x19, 0xea, 0xd5, 0xb0, 0x41, 0x4f, 0x49, 0x3a, 0xd3, 0x26, 0xfd, + 0x9d, 0x75, 0xd0, 0x33, 0x24, 0xcc, 0xdb, 0xe8, 0x1b, 0x50, 0x0f, 0x1b, 0x55, 0x75, 0x47, 0x46, + 0x3d, 0xdf, 0xab, 0x0c, 0x8a, 0x45, 0xab, 0xfb, 0xcb, 0x05, 0x18, 0x62, 0x1d, 0x85, 0xf4, 0xd8, + 0x85, 0xfe, 0x3a, 0xe7, 0x23, 0x96, 0xc4, 0x42, 0x76, 0x9b, 0x3e, 0x7a, 0xcd, 0x34, 0xe3, 0x00, + 0x2c, 0xf9, 0x51, 0xd6, 0x3b, 0x9e, 0x9f, 0x50, 0xd6, 0x85, 0xe3, 0x65, 0x7d, 0x93, 0xb3, 0xc1, + 0x92, 0x9f, 0xfb, 0xe5, 0x02, 0x00, 0x2b, 0x99, 0xc5, 0xaf, 0x68, 0xfe, 0x22, 0x14, 0x5b, 0x75, + 0x2f, 0xce, 0x06, 0xdd, 0x8a, 0x6b, 0x14, 0x78, 0x47, 0x5c, 0x42, 0x65, 0x3f, 0x30, 0x47, 0xd4, + 0x13, 0xcb, 0x0b, 0x07, 0x27, 0x96, 0xa3, 0x16, 0xf4, 0x87, 0xed, 0x84, 0xea, 0x86, 0xe2, 0x70, + 0xb5, 0x10, 0x73, 0x5e, 0xe5, 0x04, 0x79, 0x36, 0xb6, 0xf8, 0x81, 0x25, 0x1b, 0xf4, 0x3c, 0x0c, + 0xb4, 0xa2, 0xb0, 0x46, 0xcf, 0x4a, 0x71, 0x9c, 0x3e, 0x2a, 0xf5, 0x8f, 0x35, 0x01, 0xbf, 0xa3, + 0xfd, 0x8f, 0x15, 0xb6, 0xfb, 0x93, 0x53, 0x7c, 0x5d, 0xc4, 0xe6, 0x18, 0x87, 0x82, 0x2f, 0x3d, + 0x41, 0x20, 0x48, 0x14, 0x16, 0xe7, 0x70, 0xc1, 0xaf, 0xaa, 0x7d, 0x5c, 0xe8, 0x7a, 0xf2, 0xbe, + 0x1b, 0x86, 0xaa, 0x7e, 0xdc, 0x6a, 0x78, 0xbb, 0xd7, 0x72, 0xdc, 0x70, 0x73, 0x69, 0x13, 0xd6, + 0xf1, 0xd0, 0x33, 0xe2, 0x1a, 0x41, 0xaf, 0xe1, 0x7a, 0x91, 0xd7, 0x08, 0xd2, 0x2b, 0xc0, 0xfc, + 0x06, 0x41, 0xf6, 0xaa, 0x74, 0xf1, 0xd0, 0x57, 0xa5, 0xb3, 0x9a, 0x4f, 0xdf, 0xc9, 0x6b, 0x3e, + 0xef, 0x85, 0x11, 0xf9, 0x93, 0xa9, 0x23, 0xa5, 0xb3, 0x6c, 0xf4, 0xca, 0x3d, 0xbc, 0xae, 0x37, + 0x62, 0x13, 0x37, 0xdd, 0xb4, 0xfd, 0x87, 0xdd, 0xb4, 0x97, 0x01, 0x36, 0xc2, 0x76, 0x50, 0xf5, + 0xa2, 0xdd, 0xc5, 0x39, 0x91, 0x74, 0xa8, 0x14, 0xad, 0x19, 0xd5, 0x82, 0x35, 0x2c, 0x7d, 0xa3, + 0x0f, 0xde, 0x65, 0xa3, 0x7f, 0x10, 0x06, 0x59, 0x82, 0x26, 0xa9, 0x4e, 0x27, 0x22, 0x1d, 0xe7, + 0x28, 0xb9, 0x7c, 0x4a, 0xed, 0x28, 0x4b, 0x22, 0x38, 0xa5, 0x87, 0x3e, 0x04, 0xb0, 0xe9, 0x07, + 0x7e, 0x5c, 0x67, 0xd4, 0x87, 0x8e, 0x4c, 0x5d, 0xcd, 0x73, 
0x5e, 0x51, 0xc1, 0x1a, 0x45, 0xf4, + 0x0a, 0x9c, 0x26, 0x71, 0xe2, 0x37, 0xbd, 0x84, 0x54, 0xd5, 0x5d, 0xb8, 0x12, 0xf3, 0x1d, 0xaa, + 0x14, 0xd9, 0x2b, 0x59, 0x84, 0x3b, 0x79, 0x40, 0xdc, 0x49, 0xc8, 0x78, 0x23, 0xc7, 0x8f, 0xf2, + 0x46, 0xa2, 0x3f, 0x75, 0xe0, 0x74, 0x44, 0x78, 0x8e, 0x46, 0xac, 0x06, 0x76, 0x8e, 0xc9, 0xcb, + 0x8a, 0x8d, 0x6a, 0xd4, 0xaa, 0xec, 0x04, 0xce, 0x72, 0xe1, 0x8a, 0x02, 0x91, 0xb3, 0xef, 0x68, + 0xbf, 0x93, 0x07, 0xfc, 0xe4, 0x5b, 0x13, 0x13, 0x9d, 0xa5, 0xd1, 0x15, 0x71, 0xfa, 0xe6, 0xfd, + 0x8d, 0xb7, 0x26, 0xc6, 0xe4, 0xef, 0x74, 0xd1, 0x3a, 0x26, 0x49, 0xcf, 0xbd, 0x56, 0x58, 0x5d, + 0x5c, 0x13, 0x79, 0x53, 0xea, 0xdc, 0x5b, 0xa3, 0x40, 0xcc, 0xdb, 0xd0, 0xd3, 0x30, 0x50, 0xf5, + 0x48, 0x33, 0x0c, 0x54, 0x5d, 0x51, 0xa6, 0x3d, 0xcf, 0x09, 0x18, 0x56, 0xad, 0xa8, 0x01, 0x7d, + 0x3e, 0x33, 0xd1, 0x45, 0x92, 0xa4, 0x05, 0xbf, 0x00, 0x37, 0xf9, 0x65, 0x8a, 0x24, 0x13, 0xc2, + 0x82, 0x87, 0x2e, 0xf5, 0x4f, 0x9d, 0x8c, 0xd4, 0x7f, 0x1a, 0x06, 0x2a, 0x75, 0xbf, 0x51, 0x8d, + 0x48, 0x50, 0x1a, 0x63, 0xb6, 0x2a, 0x5b, 0x89, 0x59, 0x01, 0xc3, 0xaa, 0x15, 0xfd, 0x25, 0x18, + 0x09, 0xdb, 0x09, 0x7b, 0xc9, 0xe9, 0xf3, 0x8f, 0x4b, 0xa7, 0x19, 0x3a, 0x4b, 0x79, 0x59, 0xd5, + 0x1b, 0xb0, 0x89, 0x47, 0x85, 0x6d, 0x3d, 0x8c, 0x59, 0xad, 0x12, 0x26, 0x6c, 0xcf, 0x9b, 0xc2, + 0xf6, 0xaa, 0xd6, 0x86, 0x0d, 0x4c, 0xf4, 0x55, 0x07, 0x4e, 0x37, 0xb3, 0xa6, 0x4b, 0xe9, 0x02, + 0x5b, 0x99, 0xb2, 0x0d, 0x15, 0x37, 0x43, 0x9a, 0x67, 0x06, 0x77, 0x80, 0x71, 0xe7, 0x20, 0x58, + 0xd5, 0xa0, 0x78, 0x37, 0xa8, 0xd4, 0xa3, 0x30, 0x30, 0x87, 0xf7, 0xb0, 0xad, 0x9b, 0x3c, 0xec, + 0x2d, 0xcb, 0x63, 0x31, 0xf3, 0xf0, 0xfe, 0xde, 0xc4, 0xb9, 0xdc, 0x26, 0x9c, 0x3f, 0xa8, 0xf1, + 0x39, 0x38, 0x9f, 0xff, 0xa6, 0xde, 0x4d, 0xd7, 0xee, 0xd1, 0x75, 0xed, 0x79, 0x78, 0xb8, 0xeb, + 0xa0, 0xa8, 0xcc, 0x97, 0x8a, 0x99, 0x63, 0xca, 0xfc, 0x0e, 0x45, 0x6a, 0x14, 0x86, 0xf5, 0x82, + 0xf6, 0xee, 0xff, 0xe9, 0x01, 0x48, 0x3d, 0xc4, 0xc8, 0x83, 0x51, 0xee, 0x8d, 0x5e, 0x9c, 0xbb, + 
0xe7, 0x6b, 0xc1, 0xb3, 0x06, 0x01, 0x9c, 0x21, 0x88, 0x9a, 0x80, 0x38, 0x84, 0xff, 0xbe, 0x97, + 0xa8, 0x22, 0x0b, 0xc2, 0xcd, 0x76, 0x10, 0xc1, 0x39, 0x84, 0xe9, 0x8c, 0x92, 0x70, 0x8b, 0x04, + 0xd7, 0xf1, 0xf2, 0xbd, 0xdc, 0x2d, 0xe7, 0x71, 0x28, 0x83, 0x00, 0xce, 0x10, 0x44, 0x2e, 0xf4, + 0x31, 0xaf, 0x84, 0x4c, 0x2b, 0x66, 0xe2, 0x85, 0x9d, 0xf9, 0x31, 0x16, 0x2d, 0xe8, 0xcb, 0x0e, + 0x8c, 0xca, 0x2b, 0xf2, 0xcc, 0x0f, 0x28, 0x13, 0x8a, 0xaf, 0xdb, 0xf2, 0xf0, 0x5f, 0xd1, 0xa9, + 0xa7, 0xe9, 0x7a, 0x06, 0x38, 0xc6, 0x99, 0x41, 0xb8, 0x2f, 0xc1, 0x99, 0x9c, 0xee, 0x56, 0x6c, + 0xb9, 0xef, 0x3a, 0x30, 0xa4, 0x15, 0x62, 0x43, 0xb7, 0x61, 0x30, 0x2c, 0x5b, 0xcf, 0x11, 0x5b, + 0x2d, 0x77, 0xe4, 0x88, 0x29, 0x10, 0x4e, 0x19, 0x1e, 0x26, 0xb5, 0x2d, 0xb7, 0x6a, 0xdc, 0x03, + 0x1e, 0xf6, 0x91, 0x53, 0xdb, 0xfe, 0x5d, 0x2f, 0xa4, 0x94, 0x8e, 0x58, 0xba, 0x21, 0x4d, 0x84, + 0x2b, 0x1c, 0x98, 0x08, 0x57, 0x85, 0x53, 0x1e, 0x8b, 0xa2, 0xde, 0x63, 0xc1, 0x06, 0x5e, 0x87, + 0xd3, 0xa4, 0x80, 0xb3, 0x24, 0x29, 0x97, 0x38, 0xed, 0xca, 0xb8, 0xf4, 0x1e, 0x99, 0x4b, 0xd9, + 0xa4, 0x80, 0xb3, 0x24, 0xd1, 0x2b, 0x50, 0xaa, 0xb0, 0xdb, 0x8e, 0x7c, 0x8e, 0x8b, 0x9b, 0xd7, + 0xc2, 0x64, 0x2d, 0x22, 0x31, 0x09, 0x12, 0x51, 0x69, 0xe9, 0x71, 0xb1, 0x0a, 0xa5, 0xd9, 0x2e, + 0x78, 0xb8, 0x2b, 0x05, 0x6a, 0x30, 0xb0, 0x30, 0xac, 0x9f, 0xec, 0x32, 0x21, 0x22, 0xe2, 0xd3, + 0xca, 0x60, 0x28, 0xeb, 0x8d, 0xd8, 0xc4, 0x45, 0xbf, 0xe2, 0xc0, 0x48, 0x43, 0x3a, 0xaa, 0x71, + 0xbb, 0x21, 0xcb, 0x06, 0x62, 0x2b, 0xdb, 0x6f, 0x59, 0xa7, 0xcc, 0x75, 0x09, 0x03, 0x84, 0x4d, + 0xde, 0xee, 0x9b, 0x0e, 0x8c, 0x65, 0xbb, 0xa1, 0x2d, 0x78, 0xac, 0xe9, 0x45, 0x5b, 0x8b, 0xc1, + 0x66, 0xc4, 0xee, 0x01, 0x24, 0xfc, 0xa9, 0x4e, 0x6f, 0x26, 0x24, 0x9a, 0xf3, 0x76, 0x79, 0x04, + 0xaf, 0xa8, 0x3e, 0x20, 0xf3, 0xd8, 0xca, 0x41, 0xc8, 0xf8, 0x60, 0x5a, 0xa8, 0x0c, 0xe7, 0x28, + 0x02, 0x2b, 0x7a, 0xe5, 0x87, 0x41, 0xca, 0xa4, 0xc0, 0x98, 0xa8, 0x7c, 0xb6, 0x95, 0x3c, 0x24, + 0x9c, 0xdf, 0xd7, 0x1d, 0x80, 0x3e, 
0x7e, 0x07, 0xca, 0xfd, 0xb7, 0x05, 0x90, 0x4a, 0xda, 0x5f, + 0xec, 0xa0, 0x10, 0x3d, 0xd0, 0x22, 0xe6, 0x68, 0x11, 0x3e, 0x00, 0x76, 0xa0, 0x89, 0x0a, 0x71, + 0xa2, 0x85, 0x6a, 0xaf, 0xe4, 0x96, 0x9f, 0xcc, 0x86, 0x55, 0x69, 0xf9, 0x33, 0xed, 0xf5, 0x8a, + 0x80, 0x61, 0xd5, 0xea, 0x7e, 0xca, 0x81, 0x11, 0x3a, 0xcb, 0x46, 0x83, 0x34, 0xca, 0x09, 0x69, + 0xc5, 0x28, 0x86, 0x62, 0x4c, 0xff, 0xb1, 0xe7, 0xc1, 0x4a, 0xaf, 0xbe, 0x91, 0x96, 0x16, 0x32, + 0xa0, 0x4c, 0x30, 0xe7, 0xe5, 0x7e, 0xa7, 0x07, 0x06, 0xd5, 0x62, 0x1f, 0x22, 0x0e, 0x71, 0x39, + 0x2d, 0xde, 0xc8, 0xa5, 0x61, 0x49, 0x2b, 0xdc, 0x48, 0xcd, 0xf5, 0xe9, 0x60, 0x97, 0xdf, 0xb1, + 0x4f, 0xab, 0x38, 0x3e, 0x63, 0x06, 0x3c, 0xcf, 0xeb, 0x51, 0x34, 0x0d, 0x5f, 0x44, 0x3e, 0x6f, + 0xe9, 0xf1, 0xe6, 0x5e, 0x5b, 0x27, 0x8b, 0x0a, 0xa6, 0x75, 0x0f, 0x34, 0x67, 0xbe, 0xeb, 0x51, + 0x3c, 0xd4, 0x77, 0x3d, 0x2e, 0x41, 0x2f, 0x09, 0xda, 0x4d, 0xa6, 0xb6, 0x0c, 0x32, 0x75, 0xbd, + 0xf7, 0x4a, 0xd0, 0x6e, 0x9a, 0x33, 0x63, 0x28, 0xe8, 0x7d, 0x30, 0x54, 0x25, 0x71, 0x25, 0xf2, + 0xd9, 0xc5, 0x71, 0xe1, 0xef, 0x78, 0x94, 0x39, 0x91, 0x52, 0xb0, 0xd9, 0x51, 0xef, 0xe0, 0xbe, + 0x06, 0x7d, 0x6b, 0x8d, 0x76, 0xcd, 0x0f, 0x50, 0x0b, 0xfa, 0xf8, 0x35, 0x72, 0x71, 0xf2, 0x5a, + 0xb0, 0x01, 0xf9, 0xdb, 0xae, 0xe5, 0x42, 0xf0, 0x1b, 0x90, 0x82, 0x8f, 0xfb, 0xcf, 0x1c, 0xa0, + 0x06, 0xeb, 0xc2, 0x2c, 0xfa, 0xab, 0x1d, 0x9f, 0xb1, 0xf8, 0xb9, 0x9c, 0xcf, 0x58, 0x8c, 0x30, + 0xe4, 0x9c, 0x2f, 0x58, 0x34, 0x60, 0x84, 0xb9, 0xe8, 0xe5, 0x79, 0x24, 0x54, 0xdc, 0xe7, 0x0e, + 0x79, 0xf3, 0x5a, 0xef, 0x2a, 0xa4, 0xb3, 0x0e, 0xc2, 0x26, 0x71, 0xf7, 0x77, 0x7a, 0x41, 0xf3, + 0x64, 0x1f, 0x62, 0x7b, 0x7f, 0x34, 0x13, 0xb7, 0x58, 0xb1, 0x12, 0xb7, 0x90, 0xc1, 0x00, 0x2e, + 0x32, 0xcc, 0x50, 0x05, 0x1d, 0x54, 0x9d, 0x34, 0x5a, 0xe2, 0xe5, 0x50, 0x83, 0xba, 0x4a, 0x1a, + 0x2d, 0xcc, 0x5a, 0xd4, 0x1d, 0xb2, 0xde, 0xae, 0x77, 0xc8, 0xea, 0x50, 0xac, 0x79, 0xed, 0x1a, + 0x11, 0x89, 0x7b, 0x16, 0x42, 0x54, 0x2c, 0xa9, 0x9e, 0x87, 0xa8, 0xd8, 
0xbf, 0x98, 0x33, 0xa0, + 0x6f, 0x67, 0x5d, 0x66, 0x32, 0x08, 0x5f, 0xa3, 0x85, 0xb7, 0x53, 0x25, 0x47, 0xf0, 0xb7, 0x53, + 0xfd, 0xc4, 0x29, 0x33, 0xd4, 0x82, 0xfe, 0x0a, 0x2f, 0xd8, 0x20, 0x0e, 0xfc, 0x45, 0x1b, 0x97, + 0xe4, 0x18, 0x41, 0xee, 0x8a, 0x10, 0x3f, 0xb0, 0x64, 0xe3, 0x4e, 0xc1, 0x90, 0x56, 0xfe, 0x9e, + 0x3e, 0x06, 0x55, 0x2b, 0x40, 0x7b, 0x0c, 0x73, 0x5e, 0xe2, 0x61, 0xd6, 0xe2, 0x7e, 0xb3, 0x17, + 0x94, 0x4b, 0x48, 0xbf, 0xd2, 0xe5, 0x55, 0xb4, 0xca, 0x26, 0xc6, 0x5d, 0xe2, 0x30, 0xc0, 0xa2, + 0x95, 0x2a, 0x45, 0x4d, 0x12, 0xd5, 0x94, 0x11, 0x2a, 0xe4, 0xab, 0x52, 0x8a, 0x56, 0xf4, 0x46, + 0x6c, 0xe2, 0x52, 0x8d, 0xb6, 0x29, 0x22, 0xbb, 0xd9, 0xbc, 0x59, 0x19, 0xf1, 0xc5, 0x0a, 0x03, + 0x7d, 0xca, 0x81, 0xe1, 0xa6, 0x16, 0x08, 0x16, 0xf9, 0x7b, 0x36, 0x02, 0x17, 0x1a, 0x55, 0x9e, + 0x67, 0xa3, 0x43, 0xb0, 0xc1, 0x15, 0x2d, 0xc0, 0xe9, 0x98, 0x24, 0xab, 0x3b, 0x01, 0x89, 0xd4, + 0x55, 0x6b, 0x71, 0xf7, 0x5e, 0x25, 0xcd, 0x97, 0xb3, 0x08, 0xb8, 0xb3, 0x4f, 0x6e, 0xca, 0x63, + 0xf1, 0xc8, 0x29, 0x8f, 0x73, 0x30, 0xb6, 0xe9, 0xf9, 0x8d, 0x76, 0x44, 0xba, 0x26, 0x4e, 0xce, + 0x67, 0xda, 0x71, 0x47, 0x0f, 0x76, 0x6f, 0xa3, 0xe1, 0xd5, 0xe2, 0x52, 0xbf, 0x76, 0x6f, 0x83, + 0x02, 0x30, 0x87, 0xbb, 0xff, 0xd8, 0x01, 0x5e, 0xf4, 0x64, 0x7a, 0x73, 0xd3, 0x0f, 0xfc, 0x64, + 0x17, 0x7d, 0xcd, 0x81, 0xb1, 0x20, 0xac, 0x92, 0xe9, 0x20, 0xf1, 0x25, 0xd0, 0x5e, 0xad, 0x67, + 0xc6, 0xeb, 0x5a, 0x86, 0x3c, 0xbf, 0x41, 0x9f, 0x85, 0xe2, 0x8e, 0x61, 0xb8, 0x17, 0xe0, 0x5c, + 0x2e, 0x01, 0xf7, 0xcd, 0x1e, 0x30, 0x6b, 0xb7, 0xa0, 0x17, 0xa1, 0xd8, 0x60, 0xd5, 0x04, 0x9c, + 0x7b, 0x2c, 0xca, 0xc3, 0xd6, 0x8a, 0x97, 0x1b, 0xe0, 0x94, 0xd0, 0x1c, 0x0c, 0xb1, 0x82, 0x30, + 0xa2, 0xd6, 0x03, 0x7f, 0x23, 0xdc, 0xf4, 0x13, 0x53, 0xaa, 0xe9, 0x8e, 0xf9, 0x13, 0xeb, 0xdd, + 0xd0, 0xeb, 0xd0, 0xbf, 0xc1, 0x2b, 0xd5, 0xd9, 0x0b, 0x5d, 0x89, 0xd2, 0x77, 0x4c, 0x99, 0x91, + 0x75, 0xf0, 0xee, 0xa4, 0xff, 0x62, 0xc9, 0x11, 0xed, 0xc2, 0x80, 0x27, 0x9f, 0x69, 0xaf, 0xad, + 0x3c, 0x7c, 
0x63, 0xff, 0x88, 0x44, 0x0b, 0xf9, 0x0c, 0x15, 0xbb, 0x4c, 0x46, 0x4a, 0xf1, 0x50, + 0x19, 0x29, 0xdf, 0x76, 0x00, 0xd2, 0x2a, 0xfd, 0xe8, 0x16, 0x0c, 0xc4, 0xcf, 0x19, 0x56, 0xbe, + 0x8d, 0xcb, 0xd3, 0x82, 0xa2, 0x76, 0xc1, 0x50, 0x40, 0xb0, 0xe2, 0x76, 0x37, 0xcf, 0xc4, 0xcf, + 0x1c, 0x38, 0x9b, 0xf7, 0x35, 0x81, 0x07, 0x38, 0xe2, 0xa3, 0x3a, 0x25, 0x44, 0x87, 0xb5, 0x88, + 0x6c, 0xfa, 0xb7, 0xb2, 0x49, 0x2b, 0x4b, 0xb2, 0x01, 0xa7, 0x38, 0xee, 0xf7, 0xfa, 0x40, 0x31, + 0x3e, 0x26, 0x27, 0xc6, 0x53, 0xd4, 0xc8, 0xa9, 0xa5, 0x15, 0x14, 0x15, 0x1e, 0x66, 0x50, 0x2c, + 0x5a, 0xa9, 0xa1, 0x23, 0x73, 0xa9, 0x85, 0xc8, 0x66, 0xbb, 0x50, 0xe6, 0x5c, 0x63, 0xd5, 0x9a, + 0xe7, 0x16, 0x29, 0x9e, 0x88, 0x5b, 0xa4, 0xcf, 0xbe, 0x5b, 0xe4, 0x12, 0xf4, 0x47, 0x61, 0x83, + 0x4c, 0xe3, 0x6b, 0x42, 0x7d, 0x4f, 0x6b, 0xdb, 0x72, 0x30, 0x96, 0xed, 0xd9, 0xb2, 0x9a, 0x03, + 0x87, 0x2b, 0xab, 0x89, 0xbe, 0xe7, 0x1c, 0xe0, 0x79, 0x19, 0xb4, 0x75, 0x26, 0xe4, 0x56, 0xb2, + 0x62, 0xb6, 0xc8, 0xbd, 0xb8, 0x73, 0xbe, 0xee, 0xc0, 0x69, 0x12, 0x54, 0xa2, 0x5d, 0x46, 0x47, + 0x50, 0x13, 0xa1, 0xd3, 0xeb, 0x36, 0x5e, 0xbe, 0x2b, 0x59, 0xe2, 0x3c, 0x2e, 0xd2, 0x01, 0xc6, + 0x9d, 0xc3, 0x70, 0x7f, 0x52, 0x80, 0x33, 0x39, 0x14, 0xd8, 0x35, 0x99, 0x26, 0xdd, 0x40, 0x8b, + 0xd5, 0xec, 0xeb, 0xb3, 0x24, 0xe0, 0x58, 0x61, 0xa0, 0x35, 0x38, 0xbb, 0xd5, 0x8c, 0x53, 0x2a, + 0xb3, 0x61, 0x90, 0x90, 0x5b, 0xf2, 0x65, 0x92, 0x51, 0xd0, 0xb3, 0x4b, 0x39, 0x38, 0x38, 0xb7, + 0x27, 0xd5, 0x36, 0x48, 0xe0, 0x6d, 0x34, 0x48, 0xda, 0x24, 0x2e, 0x79, 0x29, 0x6d, 0xe3, 0x4a, + 0xa6, 0x1d, 0x77, 0xf4, 0x40, 0x9f, 0x73, 0xe0, 0x91, 0x98, 0x44, 0xdb, 0x24, 0x2a, 0xfb, 0x55, + 0x32, 0xdb, 0x8e, 0x93, 0xb0, 0x49, 0xa2, 0x7b, 0x74, 0x0d, 0x4e, 0xec, 0xef, 0x4d, 0x3c, 0x52, + 0xee, 0x4e, 0x0d, 0x1f, 0xc4, 0xca, 0xfd, 0x9c, 0x03, 0xa3, 0x65, 0x66, 0xac, 0x2a, 0xd5, 0xd7, + 0x76, 0xe9, 0xc1, 0xa7, 0x54, 0x49, 0x81, 0x8c, 0x10, 0x33, 0x8b, 0x00, 0xb8, 0xaf, 0xc2, 0x58, + 0x99, 0x34, 0xbd, 0x56, 0x9d, 0xdd, 0xd0, 0xe4, 
0x69, 0x3a, 0x53, 0x30, 0x18, 0x4b, 0x58, 0xf6, + 0x7b, 0x1e, 0x0a, 0x19, 0xa7, 0x38, 0xe8, 0x49, 0x9e, 0x52, 0x24, 0xef, 0x79, 0x0c, 0x72, 0x23, + 0x81, 0xe7, 0x21, 0xc5, 0x58, 0xb6, 0xb9, 0x3b, 0x30, 0x9c, 0x76, 0x27, 0x9b, 0xa8, 0x06, 0xa7, + 0x2a, 0xda, 0x15, 0xa9, 0x34, 0x39, 0xfd, 0xf0, 0xb7, 0xa9, 0x78, 0x8d, 0x52, 0x93, 0x08, 0xce, + 0x52, 0x75, 0xbf, 0x58, 0x80, 0x53, 0x8a, 0xb3, 0x08, 0x79, 0xbd, 0x91, 0x4d, 0x83, 0xc2, 0x36, + 0x4a, 0x9d, 0x98, 0x2b, 0x79, 0x40, 0x2a, 0xd4, 0x1b, 0xd9, 0x54, 0xa8, 0x63, 0x65, 0xdf, 0x11, + 0xc5, 0xfb, 0x76, 0x01, 0x06, 0x54, 0xe1, 0x95, 0x17, 0xa1, 0xc8, 0xec, 0xb8, 0xfb, 0xd3, 0x46, + 0x99, 0x4d, 0x88, 0x39, 0x25, 0x4a, 0x92, 0x65, 0x72, 0xdc, 0x73, 0xd5, 0xc9, 0x41, 0xee, 0x7e, + 0xf3, 0xa2, 0x04, 0x73, 0x4a, 0x68, 0x09, 0x7a, 0x48, 0x50, 0x15, 0x6a, 0xe9, 0xd1, 0x09, 0xb2, + 0xef, 0xe8, 0x5c, 0x09, 0xaa, 0x98, 0x52, 0x61, 0xa5, 0x0f, 0xb9, 0xf6, 0x91, 0xf9, 0x6a, 0x82, + 0x50, 0x3d, 0x44, 0xab, 0xfb, 0x2b, 0x3d, 0xd0, 0x57, 0x6e, 0x6f, 0x50, 0x05, 0xfb, 0x5b, 0x0e, + 0x9c, 0xd9, 0xc9, 0x54, 0x49, 0x4d, 0xb7, 0xec, 0x75, 0x7b, 0x2e, 0x48, 0x3d, 0x9b, 0xe8, 0x11, + 0xf9, 0x49, 0xe8, 0x9c, 0x46, 0x9c, 0x37, 0x1c, 0xa3, 0x2a, 0x62, 0xcf, 0xb1, 0x54, 0x45, 0xbc, + 0x75, 0xcc, 0xe9, 0xeb, 0x23, 0xdd, 0x52, 0xd7, 0xdd, 0xdf, 0x29, 0x02, 0xf0, 0xa7, 0xb1, 0xda, + 0x4a, 0x0e, 0xe3, 0xa3, 0x7a, 0x1e, 0x86, 0xe5, 0x37, 0xea, 0xf3, 0x3e, 0xc8, 0xb1, 0xa0, 0xb5, + 0x61, 0x03, 0x93, 0x19, 0x04, 0x41, 0x12, 0xed, 0x72, 0xa5, 0x31, 0x9b, 0xa2, 0xae, 0x5a, 0xb0, + 0x86, 0x85, 0x26, 0x0d, 0x9f, 0x3f, 0x0f, 0xe5, 0x8e, 0x1e, 0xe0, 0xa2, 0x7f, 0x1f, 0x8c, 0x9a, + 0xb5, 0x1a, 0x84, 0xa6, 0xa4, 0x42, 0xaf, 0x66, 0x89, 0x07, 0x9c, 0xc1, 0xa6, 0x9b, 0xb8, 0x1a, + 0xed, 0xe2, 0x76, 0x20, 0x54, 0x26, 0xb5, 0x89, 0xe7, 0x18, 0x14, 0x8b, 0x56, 0x76, 0x51, 0x9e, + 0x9d, 0x46, 0x1c, 0x2e, 0x2e, 0xdb, 0xa7, 0x17, 0xe5, 0xb5, 0x36, 0x6c, 0x60, 0x52, 0x0e, 0xc2, + 0xc7, 0x07, 0xe6, 0x6b, 0x92, 0x71, 0xcc, 0xb5, 0x60, 0x34, 0x34, 0x7d, 0x13, 0x3c, 
0x5f, 0xeb, + 0x5d, 0x87, 0xdc, 0x7a, 0x46, 0x5f, 0x1e, 0x32, 0xcf, 0xb8, 0x32, 0x32, 0xf4, 0xa9, 0xce, 0xa8, + 0x67, 0x72, 0x0f, 0x9b, 0xa9, 0x86, 0x5d, 0x93, 0xad, 0xd7, 0xe0, 0x6c, 0x2b, 0xac, 0xae, 0x45, + 0x7e, 0x18, 0xf9, 0xc9, 0xee, 0x6c, 0xc3, 0x8b, 0x63, 0xb6, 0x31, 0x46, 0x4c, 0xe5, 0x64, 0x2d, + 0x07, 0x07, 0xe7, 0xf6, 0xa4, 0xda, 0x7d, 0x4b, 0x00, 0x59, 0x9a, 0x51, 0x91, 0x6b, 0xf7, 0x12, + 0x11, 0xab, 0x56, 0xf7, 0x0c, 0x9c, 0x2e, 0xb7, 0x5b, 0xad, 0x86, 0x4f, 0xaa, 0xca, 0xa7, 0xee, + 0xbe, 0x1f, 0x4e, 0x89, 0x9a, 0x89, 0x4a, 0x15, 0x38, 0x52, 0x85, 0x5f, 0xf7, 0x4f, 0x1d, 0x38, + 0x95, 0x49, 0xea, 0x40, 0xaf, 0x67, 0x0f, 0x70, 0x2b, 0x0e, 0x2b, 0xfd, 0xec, 0xe6, 0x2f, 0x69, + 0xae, 0x32, 0x50, 0x97, 0xc9, 0xcb, 0xd6, 0xee, 0x00, 0xb0, 0x14, 0x5f, 0x7e, 0x22, 0xe8, 0x19, + 0xd0, 0xee, 0x67, 0x0b, 0x90, 0x9f, 0x49, 0x83, 0x3e, 0xd6, 0xb9, 0x00, 0x2f, 0x5a, 0x5c, 0x00, + 0x91, 0xca, 0xd3, 0x7d, 0x0d, 0x02, 0x73, 0x0d, 0x56, 0x2c, 0xad, 0x81, 0xe0, 0xdb, 0xb9, 0x12, + 0xff, 0xd3, 0x81, 0xa1, 0xf5, 0xf5, 0x65, 0xe5, 0x5f, 0xc2, 0x70, 0x3e, 0xe6, 0x77, 0x9c, 0x59, + 0x9c, 0x72, 0x36, 0x6c, 0xb6, 0x78, 0xd8, 0x52, 0x84, 0x53, 0x59, 0xf9, 0xca, 0x72, 0x2e, 0x06, + 0xee, 0xd2, 0x13, 0x2d, 0xc2, 0x19, 0xbd, 0xa5, 0xac, 0x7d, 0x8f, 0xab, 0x28, 0xea, 0x8a, 0x74, + 0x36, 0xe3, 0xbc, 0x3e, 0x59, 0x52, 0xc2, 0x55, 0x28, 0xbe, 0x7f, 0xdf, 0x41, 0x4a, 0x34, 0xe3, + 0xbc, 0x3e, 0xee, 0x2a, 0x0c, 0xad, 0x7b, 0x91, 0x9a, 0xf8, 0x07, 0x60, 0xac, 0x12, 0x36, 0xa5, + 0x8b, 0x66, 0x99, 0x6c, 0x93, 0x86, 0x98, 0x32, 0xaf, 0x9a, 0x9f, 0x69, 0xc3, 0x1d, 0xd8, 0xee, + 0x7f, 0xbb, 0x08, 0xea, 0xce, 0xd6, 0x21, 0x4e, 0x98, 0x96, 0xca, 0x31, 0x2c, 0x5a, 0xce, 0x31, + 0x54, 0xb2, 0x36, 0x93, 0x67, 0x98, 0xa4, 0x79, 0x86, 0x7d, 0xb6, 0xf3, 0x0c, 0x95, 0xc2, 0xd8, + 0x91, 0x6b, 0xf8, 0x15, 0x07, 0x86, 0x83, 0xb0, 0x4a, 0x54, 0x30, 0xaa, 0x9f, 0x69, 0xad, 0xaf, + 0xd8, 0x4b, 0x9e, 0xe6, 0x39, 0x73, 0x82, 0x3c, 0xcf, 0x44, 0x55, 0x47, 0x94, 0xde, 0x84, 0x8d, + 0x71, 0xa0, 0x79, 0xcd, 
0x69, 0xc8, 0x7d, 0xf3, 0x8f, 0xe6, 0x59, 0x0f, 0x77, 0xf5, 0x00, 0xde, + 0xd2, 0xf4, 0xa6, 0x41, 0xeb, 0x1f, 0x8b, 0x4f, 0x43, 0x0c, 0xb2, 0x02, 0x6b, 0xaa, 0x4f, 0xb9, + 0xd0, 0xc7, 0x53, 0x56, 0x45, 0x05, 0x1b, 0x16, 0xf9, 0xe2, 0xe9, 0xac, 0x58, 0xb4, 0xa0, 0x44, + 0x06, 0xbc, 0x87, 0x6c, 0xd5, 0x53, 0x37, 0x02, 0xea, 0xf9, 0x11, 0x6f, 0xf4, 0x82, 0x6e, 0x94, + 0x0e, 0x1f, 0xc6, 0x28, 0x1d, 0xe9, 0x6a, 0x90, 0x7e, 0xc1, 0x81, 0xe1, 0x8a, 0x56, 0xdf, 0xbc, + 0xf4, 0xb4, 0xad, 0x2f, 0xa5, 0xe6, 0x95, 0xa1, 0xe7, 0x01, 0x15, 0xa3, 0x9e, 0xba, 0xc1, 0x9d, + 0x95, 0xdc, 0x63, 0x16, 0x38, 0x3b, 0xfa, 0xad, 0xdc, 0xd4, 0x37, 0x2d, 0x7a, 0x99, 0xc4, 0x47, + 0x61, 0x58, 0xf0, 0x42, 0xb7, 0x61, 0x40, 0x66, 0x3d, 0x8b, 0x9c, 0x64, 0x6c, 0xc3, 0xc3, 0x6d, + 0x86, 0xd1, 0x64, 0xa1, 0x2e, 0x0e, 0xc5, 0x8a, 0x23, 0xaa, 0x43, 0x4f, 0xd5, 0xab, 0x89, 0xec, + 0xe4, 0x15, 0x3b, 0x75, 0x10, 0x25, 0x4f, 0x66, 0x5e, 0xcd, 0x4d, 0x2f, 0x60, 0xca, 0x02, 0xdd, + 0x4a, 0x0b, 0x44, 0x8f, 0x59, 0x3b, 0x7d, 0x4d, 0x35, 0x89, 0xfb, 0x18, 0x3a, 0xea, 0x4d, 0x57, + 0x45, 0xe4, 0xf1, 0xff, 0x63, 0x6c, 0xe7, 0xed, 0x14, 0x52, 0xe4, 0x95, 0x1f, 0xd2, 0xe8, 0x25, + 0xe5, 0xc2, 0xbe, 0xd0, 0xfe, 0xf3, 0xb6, 0xb8, 0xb0, 0xfa, 0x05, 0xd9, 0x2f, 0xb3, 0x37, 0xa0, + 0xaf, 0xc5, 0xb2, 0x18, 0x4a, 0xbf, 0x60, 0xeb, 0x6c, 0xe1, 0x59, 0x11, 0x7c, 0x6f, 0xf2, 0xff, + 0xb1, 0xe0, 0x81, 0xae, 0x40, 0x3f, 0xff, 0xce, 0x01, 0xcf, 0x0e, 0x1f, 0xba, 0x3c, 0xde, 0xfd, + 0x6b, 0x09, 0xe9, 0x41, 0xc1, 0x7f, 0xc7, 0x58, 0xf6, 0x45, 0x5f, 0x74, 0x60, 0x94, 0x4a, 0xd4, + 0xf4, 0xc3, 0x0c, 0x25, 0x64, 0x4b, 0x66, 0x5d, 0x8f, 0xa9, 0x46, 0x22, 0x65, 0x8d, 0x32, 0x93, + 0x16, 0x0d, 0x76, 0x38, 0xc3, 0x1e, 0xbd, 0x01, 0x03, 0xb1, 0x5f, 0x25, 0x15, 0x2f, 0x8a, 0x4b, + 0x67, 0x8e, 0x67, 0x28, 0x69, 0xac, 0x43, 0x30, 0xc2, 0x8a, 0x65, 0xee, 0x17, 0xca, 0xcf, 0x3e, + 0xe0, 0x2f, 0x94, 0xff, 0x75, 0x07, 0xce, 0xf1, 0xba, 0xdc, 0xd9, 0xa2, 0xec, 0xe7, 0xee, 0xd1, + 0xbd, 0xc2, 0xd2, 0xda, 0xa7, 0xf3, 0x48, 0xe2, 0x7c, 0x4e, 
0xac, 0xb0, 0xa7, 0xf9, 0x1d, 0x8d, + 0xf3, 0x56, 0x63, 0x7e, 0x87, 0xff, 0x76, 0x06, 0x7a, 0x16, 0x86, 0x5a, 0xe2, 0x38, 0xf4, 0xe3, + 0x26, 0xbb, 0xa4, 0xd0, 0xc3, 0x2f, 0x72, 0xad, 0xa5, 0x60, 0xac, 0xe3, 0x18, 0x55, 0x5e, 0x2f, + 0x1d, 0x54, 0xe5, 0x15, 0x5d, 0x87, 0xa1, 0x24, 0x6c, 0x90, 0x48, 0x58, 0xaa, 0x25, 0xb6, 0x03, + 0x2f, 0xe6, 0xbd, 0x5b, 0xeb, 0x0a, 0x2d, 0xb5, 0x64, 0x53, 0x58, 0x8c, 0x75, 0x3a, 0x2c, 0x31, + 0x54, 0xd4, 0x3b, 0x8f, 0x98, 0x09, 0xfb, 0x70, 0x26, 0x31, 0x54, 0x6f, 0xc4, 0x26, 0x2e, 0x5a, + 0x80, 0xd3, 0xad, 0x0e, 0x1b, 0x98, 0x5f, 0x53, 0x52, 0xe9, 0x04, 0x9d, 0x06, 0x70, 0x67, 0x1f, + 0xc3, 0xfa, 0x7d, 0xe4, 0x20, 0xeb, 0xb7, 0x4b, 0xcd, 0xd3, 0x47, 0xef, 0xa5, 0xe6, 0x29, 0xaa, + 0xc2, 0xa3, 0x5e, 0x3b, 0x09, 0x59, 0x8d, 0x0e, 0xb3, 0x0b, 0xcf, 0x91, 0x7d, 0x9c, 0xa7, 0xdd, + 0xee, 0xef, 0x4d, 0x3c, 0x3a, 0x7d, 0x00, 0x1e, 0x3e, 0x90, 0x0a, 0x7a, 0x0d, 0x06, 0x88, 0xa8, + 0xdb, 0x5a, 0xfa, 0x39, 0x5b, 0x4a, 0x82, 0x59, 0x09, 0x56, 0xa6, 0x3c, 0x72, 0x18, 0x56, 0xfc, + 0xd0, 0x3a, 0x0c, 0xd5, 0xc3, 0x38, 0x99, 0x6e, 0xf8, 0x5e, 0x4c, 0xe2, 0xd2, 0x63, 0x6c, 0xd3, + 0xe4, 0xea, 0x5e, 0x57, 0x25, 0x5a, 0xba, 0x67, 0xae, 0xa6, 0x3d, 0xb1, 0x4e, 0x06, 0x11, 0x16, + 0xf9, 0x63, 0x09, 0xc2, 0x32, 0x2a, 0x73, 0x91, 0x4d, 0xec, 0xa9, 0x3c, 0xca, 0x6b, 0x61, 0xb5, + 0x6c, 0x62, 0xab, 0xd0, 0x9f, 0x0e, 0xc4, 0x59, 0x9a, 0xe8, 0x79, 0x18, 0x6e, 0x85, 0xd5, 0x72, + 0x8b, 0x54, 0xd6, 0xbc, 0xa4, 0x52, 0x2f, 0x4d, 0x98, 0x5e, 0xb7, 0x35, 0xad, 0x0d, 0x1b, 0x98, + 0xa8, 0x05, 0xfd, 0x4d, 0x7e, 0x79, 0xbb, 0xf4, 0x84, 0x2d, 0xdb, 0x46, 0xdc, 0x06, 0xe7, 0xfa, + 0x82, 0xf8, 0x81, 0x25, 0x1b, 0xf4, 0x0f, 0x1c, 0x38, 0x95, 0xb9, 0x76, 0x53, 0x7a, 0x87, 0x35, + 0x95, 0xc5, 0x24, 0x3c, 0xf3, 0x14, 0x5b, 0x3e, 0x13, 0x78, 0xa7, 0x13, 0x84, 0xb3, 0x23, 0xe2, + 0xeb, 0xc2, 0x2a, 0x30, 0x94, 0x9e, 0xb4, 0xb7, 0x2e, 0x8c, 0xa0, 0x5c, 0x17, 0xf6, 0x03, 0x4b, + 0x36, 0xe8, 0x12, 0xf4, 0x8b, 0x62, 0x69, 0xa5, 0xa7, 0xcc, 0xf0, 0xad, 0xa8, 0xa9, 0x86, 0x65, + 
0xfb, 0xf8, 0xfb, 0xe1, 0x74, 0x87, 0xe9, 0x76, 0xa4, 0x32, 0x00, 0xbf, 0xe1, 0x80, 0x7e, 0x63, + 0xd6, 0xfa, 0xc7, 0x12, 0x9e, 0x87, 0xe1, 0x0a, 0xff, 0xca, 0x19, 0xbf, 0x73, 0xdb, 0x6b, 0xfa, + 0x3f, 0x67, 0xb5, 0x36, 0x6c, 0x60, 0xba, 0x57, 0x01, 0x75, 0x56, 0xb2, 0xbe, 0xa7, 0x1a, 0x33, + 0xff, 0xc8, 0x81, 0x11, 0x43, 0x67, 0xb0, 0x1e, 0xf1, 0x9b, 0x07, 0xd4, 0xf4, 0xa3, 0x28, 0x8c, + 0xf4, 0x6f, 0x57, 0x89, 0xd2, 0xbd, 0xec, 0xbe, 0xd3, 0x4a, 0x47, 0x2b, 0xce, 0xe9, 0xe1, 0xfe, + 0xd3, 0x5e, 0x48, 0x73, 0x7e, 0x55, 0xb9, 0x51, 0xa7, 0x6b, 0xb9, 0xd1, 0x67, 0x60, 0xe0, 0xd5, + 0x38, 0x0c, 0xd6, 0xd2, 0xa2, 0xa4, 0xea, 0x59, 0xbc, 0x50, 0x5e, 0xbd, 0xc6, 0x30, 0x15, 0x06, + 0xc3, 0xfe, 0xe8, 0xbc, 0xdf, 0x48, 0x3a, 0xab, 0x56, 0xbe, 0xf0, 0x22, 0x87, 0x63, 0x85, 0xc1, + 0x3e, 0x63, 0xb5, 0x4d, 0x94, 0x63, 0x3c, 0xfd, 0x8c, 0x15, 0x2f, 0x52, 0xcf, 0xda, 0xd0, 0x14, + 0x0c, 0x2a, 0xa7, 0xba, 0xf0, 0xd4, 0xab, 0x95, 0x52, 0x9e, 0x77, 0x9c, 0xe2, 0x30, 0x85, 0x50, + 0x38, 0x62, 0x85, 0x0b, 0xa5, 0x6c, 0xc3, 0x3c, 0xc9, 0xb8, 0x76, 0xb9, 0x6c, 0x97, 0x60, 0xac, + 0x58, 0xe6, 0x85, 0x3d, 0x07, 0x8f, 0x23, 0xec, 0xa9, 0x27, 0xa0, 0x17, 0x0f, 0x9b, 0x80, 0x6e, + 0xee, 0xed, 0x81, 0x43, 0xed, 0xed, 0x4f, 0xf7, 0x40, 0xff, 0x0d, 0x12, 0xb1, 0x62, 0xcd, 0x97, + 0xa0, 0x7f, 0x9b, 0xff, 0x9b, 0xbd, 0x49, 0x28, 0x30, 0xb0, 0x6c, 0xa7, 0xcf, 0x6d, 0xa3, 0xed, + 0x37, 0xaa, 0x73, 0xe9, 0x5b, 0x9c, 0xd6, 0x79, 0x93, 0x0d, 0x38, 0xc5, 0xa1, 0x1d, 0x6a, 0x54, + 0xb3, 0x6f, 0x36, 0xfd, 0x24, 0x9b, 0x04, 0xb4, 0x20, 0x1b, 0x70, 0x8a, 0x83, 0x9e, 0x82, 0xbe, + 0x9a, 0x9f, 0xac, 0x7b, 0xb5, 0x6c, 0x94, 0x6f, 0x81, 0x41, 0xb1, 0x68, 0x65, 0x61, 0x22, 0x3f, + 0x59, 0x8f, 0x08, 0xf3, 0xec, 0x76, 0x94, 0x14, 0x58, 0xd0, 0xda, 0xb0, 0x81, 0xc9, 0x86, 0x14, + 0x8a, 0x99, 0x89, 0x0c, 0xc8, 0x74, 0x48, 0xb2, 0x01, 0xa7, 0x38, 0x74, 0xff, 0x57, 0xc2, 0x66, + 0xcb, 0x6f, 0x88, 0xdc, 0x5c, 0x6d, 0xff, 0xcf, 0x0a, 0x38, 0x56, 0x18, 0x14, 0x9b, 0x8a, 0x30, + 0x2a, 0x7e, 0xb2, 0x9f, 0x0c, 0x5a, 
0x13, 0x70, 0xac, 0x30, 0xdc, 0x1b, 0x30, 0xc2, 0xdf, 0xe4, + 0xd9, 0x86, 0xe7, 0x37, 0x17, 0x66, 0xd1, 0x95, 0x8e, 0x04, 0xf4, 0x4b, 0x39, 0x09, 0xe8, 0xe7, + 0x8c, 0x4e, 0x9d, 0x89, 0xe8, 0xee, 0x8f, 0x0a, 0x30, 0x70, 0x82, 0x5f, 0x5d, 0x3b, 0xf1, 0x6f, + 0x7a, 0xa2, 0x5b, 0x99, 0x2f, 0xae, 0xad, 0xd9, 0xbc, 0x4f, 0x72, 0xe0, 0xd7, 0xd6, 0xfe, 0x4b, + 0x01, 0xce, 0x4b, 0xd4, 0xf4, 0x9b, 0xf6, 0xec, 0x93, 0x41, 0xc7, 0xbf, 0xd0, 0x91, 0xb1, 0xd0, + 0x6b, 0xf6, 0xac, 0xd1, 0x85, 0xd9, 0xae, 0x4b, 0xfd, 0x5a, 0x66, 0xa9, 0xb1, 0x55, 0xae, 0x07, + 0x2f, 0xf6, 0x9f, 0x39, 0x30, 0x9e, 0xbf, 0xd8, 0x27, 0xf0, 0x91, 0xbb, 0x37, 0xcc, 0x8f, 0xdc, + 0xfd, 0x92, 0xbd, 0x2d, 0x66, 0x4e, 0xa5, 0xcb, 0xe7, 0xee, 0xfe, 0xc4, 0x81, 0xb3, 0xb2, 0x03, + 0x3b, 0x3d, 0x67, 0xfc, 0x80, 0x25, 0xa2, 0x1c, 0xff, 0x36, 0xbb, 0x6d, 0x6c, 0xb3, 0x97, 0xed, + 0x4d, 0x5c, 0x9f, 0x47, 0xd7, 0xef, 0xf5, 0xfe, 0xb1, 0x03, 0xa5, 0xbc, 0x0e, 0x27, 0xf0, 0xc8, + 0x5f, 0x37, 0x1f, 0xf9, 0x8d, 0xe3, 0x99, 0x79, 0xf7, 0x07, 0x5e, 0xea, 0xb6, 0x50, 0xa8, 0x21, + 0xf5, 0x2a, 0xc7, 0x56, 0x8c, 0x96, 0xb3, 0xc8, 0x57, 0xd0, 0x1a, 0xd0, 0x17, 0xb3, 0xac, 0x0d, + 0xb1, 0x05, 0xae, 0xda, 0xd0, 0xb6, 0x28, 0x3d, 0xe1, 0x63, 0x67, 0xff, 0x63, 0xc1, 0xc3, 0xfd, + 0x43, 0x07, 0x86, 0x4f, 0xf0, 0xe3, 0x95, 0xa1, 0xf9, 0x90, 0x5f, 0xb0, 0xf7, 0x90, 0xbb, 0x3c, + 0xd8, 0xbd, 0x22, 0x74, 0x7c, 0xcf, 0x0f, 0x7d, 0xc6, 0x51, 0x99, 0x1a, 0x3c, 0x9b, 0xed, 0x43, + 0xf6, 0xc6, 0x71, 0x94, 0x6a, 0x72, 0xe8, 0xeb, 0x99, 0x12, 0x7b, 0x05, 0x5b, 0x75, 0x6b, 0x3a, + 0x46, 0x73, 0x0f, 0xa5, 0xf6, 0xbe, 0xe2, 0x00, 0xf0, 0x71, 0x8a, 0x0a, 0xbd, 0x74, 0x6c, 0x1b, + 0xc7, 0xb6, 0x52, 0x94, 0x09, 0x1f, 0x9a, 0x12, 0x90, 0x69, 0x03, 0xd6, 0x46, 0x72, 0x1f, 0x35, + 0xf4, 0xee, 0xbb, 0x7c, 0xdf, 0x17, 0x1d, 0x38, 0x95, 0x19, 0x6e, 0x4e, 0xff, 0x4d, 0xf3, 0x3b, + 0x5f, 0x16, 0x74, 0x05, 0xb3, 0x6e, 0xab, 0xee, 0x0e, 0xf8, 0x23, 0x17, 0x8c, 0x0f, 0xa1, 0xa2, + 0xd7, 0x61, 0x50, 0xda, 0xf2, 0x72, 0x7b, 0xdb, 0xfc, 0xde, 0xa1, 0x52, 
0xd8, 0x25, 0x24, 0xc6, + 0x29, 0xbf, 0x4c, 0x22, 0x58, 0xe1, 0x50, 0x89, 0x60, 0x0f, 0xf6, 0x6b, 0x89, 0xf9, 0x9e, 0xd6, + 0xde, 0x63, 0xf1, 0xb4, 0x3e, 0x6a, 0xdd, 0xd3, 0xfa, 0xd8, 0x09, 0x7b, 0x5a, 0xb5, 0xb0, 0x57, + 0xf1, 0x3e, 0xc2, 0x5e, 0xaf, 0xc3, 0xd9, 0xed, 0xd4, 0x8c, 0x52, 0x3b, 0x49, 0xd4, 0x68, 0xb9, + 0x94, 0xeb, 0x5f, 0xa5, 0x26, 0x61, 0x9c, 0x90, 0x20, 0xd1, 0x0c, 0xb0, 0x34, 0x07, 0xed, 0x46, + 0x0e, 0x39, 0x9c, 0xcb, 0x24, 0x1b, 0xbf, 0xe8, 0x3f, 0x44, 0xfc, 0xe2, 0x3b, 0x0e, 0x9c, 0xf3, + 0x3a, 0xae, 0x04, 0x61, 0xb2, 0x29, 0x92, 0x28, 0x6e, 0xda, 0xd3, 0xcb, 0x0d, 0xf2, 0x22, 0x50, + 0x94, 0xd7, 0x84, 0xf3, 0x07, 0x84, 0x9e, 0x4c, 0x83, 0xc9, 0x3c, 0x73, 0x31, 0x3f, 0xf2, 0xfb, + 0xf5, 0x6c, 0x86, 0x0a, 0xb0, 0xa5, 0xff, 0x88, 0x5d, 0xfb, 0xd1, 0x42, 0x96, 0xca, 0xd0, 0x7d, + 0x64, 0xa9, 0x64, 0x82, 0x49, 0xc3, 0x96, 0x82, 0x49, 0x01, 0x8c, 0xf9, 0x4d, 0xaf, 0x46, 0xd6, + 0xda, 0x8d, 0x06, 0xbf, 0xa3, 0x20, 0xbf, 0x48, 0x99, 0xeb, 0x93, 0x5a, 0x0e, 0x2b, 0x5e, 0x23, + 0xfb, 0xe1, 0x5f, 0x75, 0x17, 0x63, 0x31, 0x43, 0x09, 0x77, 0xd0, 0xa6, 0x1b, 0x96, 0x15, 0x0b, + 0x23, 0x09, 0x5d, 0x6d, 0x96, 0x0a, 0x31, 0xc0, 0x37, 0xec, 0xd5, 0x14, 0x8c, 0x75, 0x1c, 0xb4, + 0x04, 0x83, 0xd5, 0x20, 0x16, 0xb7, 0x1b, 0x4f, 0x31, 0x61, 0xf6, 0x4e, 0x2a, 0x02, 0xe7, 0xae, + 0x95, 0xd5, 0xbd, 0xc6, 0x47, 0x73, 0xea, 0xd0, 0xa9, 0x76, 0x9c, 0xf6, 0x47, 0x2b, 0x8c, 0x98, + 0xf8, 0xe4, 0x0f, 0xcf, 0x50, 0x78, 0xbc, 0x4b, 0x08, 0x64, 0xee, 0x9a, 0xfc, 0x68, 0xd1, 0x88, + 0x60, 0x27, 0xbe, 0xdd, 0x93, 0x52, 0xd0, 0xbe, 0x0c, 0x7a, 0xfa, 0xc0, 0x2f, 0x83, 0xb2, 0x02, + 0x94, 0x49, 0x43, 0x05, 0x3c, 0x2f, 0x5a, 0x2b, 0x40, 0x99, 0xe6, 0xfe, 0x89, 0x02, 0x94, 0x29, + 0x00, 0xeb, 0x2c, 0xd1, 0x6a, 0xb7, 0xc0, 0xef, 0x19, 0x26, 0x34, 0x8e, 0x1e, 0xc6, 0xd5, 0x23, + 0x80, 0x67, 0x0f, 0x8c, 0x00, 0x76, 0x44, 0x2c, 0xcf, 0x1d, 0x21, 0x62, 0x59, 0x67, 0xa5, 0x01, + 0x17, 0x66, 0x45, 0x90, 0xd8, 0x82, 0xc5, 0xc2, 0xca, 0x2e, 0xf0, 0x5c, 0x4a, 0xf6, 0x2f, 0xe6, + 0x0c, 0xba, 
0xa6, 0x08, 0x5f, 0xb8, 0xe7, 0x14, 0x61, 0x2a, 0x9e, 0x53, 0x38, 0xab, 0x31, 0x59, + 0x14, 0xe2, 0x39, 0x05, 0x63, 0x1d, 0x27, 0x1b, 0xff, 0x7b, 0xf8, 0xd8, 0xe2, 0x7f, 0xe3, 0x27, + 0x10, 0xff, 0x7b, 0xe4, 0xd0, 0xf1, 0xbf, 0x37, 0xe0, 0x4c, 0x2b, 0xac, 0xce, 0xf9, 0x71, 0xd4, + 0x66, 0x97, 0xb6, 0x66, 0xda, 0xd5, 0x1a, 0x49, 0x58, 0x00, 0x71, 0xe8, 0xf2, 0x65, 0x7d, 0x90, + 0x2d, 0xf6, 0x22, 0x4f, 0x6e, 0x3f, 0xbb, 0x41, 0x12, 0xfe, 0x30, 0xb3, 0xbd, 0x98, 0x47, 0x80, + 0x25, 0x93, 0xe6, 0x34, 0xe2, 0x3c, 0x3e, 0x7a, 0xf8, 0xf1, 0xf1, 0x93, 0x09, 0x3f, 0x7e, 0x00, + 0x06, 0xe2, 0x7a, 0x3b, 0xa9, 0x86, 0x3b, 0x01, 0x8b, 0x31, 0x0f, 0xce, 0xbc, 0x43, 0x79, 0x68, + 0x05, 0xfc, 0xce, 0xde, 0xc4, 0x98, 0xfc, 0x5f, 0x73, 0xce, 0x0a, 0x08, 0xfa, 0x46, 0x97, 0x6b, + 0x29, 0xee, 0x71, 0x5e, 0x4b, 0xb9, 0x70, 0xa4, 0x2b, 0x29, 0x79, 0x31, 0xd6, 0x27, 0xde, 0x76, + 0x31, 0xd6, 0xaf, 0x39, 0x30, 0xb2, 0xad, 0x7b, 0xc2, 0x45, 0x1c, 0xd8, 0x42, 0x3e, 0x8a, 0xe1, + 0x60, 0x9f, 0x71, 0xa9, 0xb0, 0x33, 0x40, 0x77, 0xb2, 0x00, 0x6c, 0x8e, 0x24, 0x27, 0x57, 0xe6, + 0xc9, 0x07, 0x95, 0x2b, 0xf3, 0x06, 0x13, 0x66, 0xd2, 0xd2, 0x65, 0xc1, 0x61, 0xbb, 0xa9, 0xb2, + 0x52, 0x30, 0xaa, 0x4c, 0x59, 0x9d, 0x1f, 0xfa, 0x82, 0x03, 0x63, 0xd2, 0x38, 0x13, 0x91, 0xac, + 0x58, 0x24, 0xfb, 0xd9, 0xb4, 0x09, 0x59, 0xb6, 0xf8, 0x7a, 0x86, 0x0f, 0xee, 0xe0, 0x4c, 0x45, + 0xbb, 0xca, 0xad, 0xaa, 0xc5, 0x2c, 0xa7, 0x55, 0x28, 0x32, 0xd3, 0x29, 0x18, 0xeb, 0x38, 0xe8, + 0x9b, 0xea, 0x9b, 0xdf, 0x97, 0x98, 0x54, 0x7f, 0xc9, 0xb2, 0x82, 0x6a, 0xe3, 0xc3, 0xdf, 0xe8, + 0x4b, 0x0e, 0x8c, 0xed, 0x64, 0xbc, 0x1a, 0x22, 0xdb, 0x11, 0xdb, 0xf7, 0x97, 0xf0, 0xe5, 0xce, + 0x42, 0x71, 0xc7, 0x08, 0xd0, 0x6d, 0x00, 0x4f, 0x79, 0xbb, 0x45, 0x56, 0xe4, 0xb2, 0xcd, 0x08, + 0x02, 0xbf, 0xaf, 0x95, 0xfe, 0xc6, 0x1a, 0xbf, 0xfb, 0x4e, 0x74, 0x78, 0x5b, 0x7d, 0x4e, 0xfd, + 0x3f, 0x9f, 0x81, 0x51, 0x33, 0x48, 0x85, 0xde, 0x65, 0x16, 0xc5, 0xbf, 0x98, 0xad, 0x2f, 0x3e, + 0x22, 0xf1, 0x8d, 0x1a, 0xe3, 0x46, 0x11, 0xf0, 
0xc2, 0xb1, 0x16, 0x01, 0xef, 0x39, 0x99, 0x22, + 0xe0, 0x63, 0xc7, 0x51, 0x04, 0xfc, 0xf4, 0x91, 0x8a, 0x80, 0x6b, 0x45, 0xd8, 0x7b, 0xef, 0x52, + 0x84, 0x7d, 0x1a, 0x4e, 0xc9, 0x4b, 0x2c, 0x44, 0x54, 0x77, 0xe6, 0xf1, 0xeb, 0x0b, 0xa2, 0xcb, + 0xa9, 0x59, 0xb3, 0x19, 0x67, 0xf1, 0xd1, 0xe7, 0x1d, 0x28, 0x06, 0xac, 0x67, 0x9f, 0xad, 0x2f, + 0xa2, 0x98, 0x5b, 0x8b, 0x59, 0xcd, 0x42, 0x28, 0xc9, 0xb4, 0xdd, 0x22, 0x83, 0xdd, 0x91, 0xff, + 0x60, 0x3e, 0x02, 0xf4, 0x0a, 0x94, 0xc2, 0xcd, 0xcd, 0x46, 0xe8, 0x55, 0xd3, 0x4a, 0xe5, 0x32, + 0xc0, 0xce, 0x2f, 0x21, 0xaa, 0x72, 0x9a, 0xab, 0x5d, 0xf0, 0x70, 0x57, 0x0a, 0xe8, 0x3b, 0x54, + 0x15, 0x49, 0xc2, 0x88, 0x54, 0x53, 0x17, 0xcd, 0x20, 0x9b, 0x33, 0xb1, 0x3e, 0xe7, 0xb2, 0xc9, + 0x87, 0xcf, 0x5e, 0x3d, 0x94, 0x4c, 0x2b, 0xce, 0x0e, 0x0b, 0x45, 0x70, 0xbe, 0x95, 0xe7, 0x21, + 0x8a, 0xc5, 0xd5, 0x9b, 0x83, 0xfc, 0x54, 0xf2, 0xd5, 0x3d, 0x9f, 0xeb, 0x63, 0x8a, 0x71, 0x17, + 0xca, 0x7a, 0x0d, 0xf3, 0x81, 0x93, 0xa9, 0x61, 0xfe, 0x71, 0x80, 0x8a, 0x2c, 0x08, 0x25, 0x7d, + 0x0e, 0x4b, 0x56, 0xee, 0x84, 0x70, 0x9a, 0xda, 0x07, 0x13, 0x15, 0x1b, 0xac, 0xb1, 0x44, 0xff, + 0x3b, 0xb7, 0xdc, 0x3e, 0x77, 0xac, 0xd4, 0xac, 0xef, 0x89, 0xb7, 0x5d, 0xc9, 0xfd, 0x7f, 0xe8, + 0xc0, 0x38, 0xdf, 0x79, 0x59, 0x75, 0x9e, 0x2a, 0x13, 0xe2, 0x92, 0x8a, 0xed, 0x1c, 0x0c, 0x96, + 0x8e, 0x56, 0x36, 0xb8, 0xb2, 0x88, 0xed, 0x01, 0x23, 0x41, 0x5f, 0xc9, 0x31, 0x22, 0x4e, 0xd9, + 0x72, 0x55, 0xe6, 0x97, 0x6a, 0x3f, 0xb3, 0x7f, 0x18, 0xbb, 0xe1, 0x9f, 0x74, 0xf5, 0xa4, 0x22, + 0x36, 0xbc, 0xbf, 0x76, 0x4c, 0x9e, 0x54, 0xbd, 0x9e, 0xfc, 0x91, 0xfc, 0xa9, 0x5f, 0x74, 0x60, + 0xcc, 0xcb, 0xe4, 0x4c, 0x30, 0xf7, 0x8f, 0x15, 0x57, 0xd4, 0x74, 0x94, 0x26, 0x62, 0x30, 0xb5, + 0x2e, 0x9b, 0x9e, 0x81, 0x3b, 0x98, 0x8f, 0x7f, 0xc6, 0xe1, 0x1f, 0xa1, 0xe9, 0xaa, 0x17, 0x6d, + 0x98, 0x7a, 0xd1, 0xb2, 0xcd, 0xcf, 0x60, 0xe8, 0x0a, 0xda, 0xaf, 0x3a, 0x70, 0x36, 0x4f, 0x6c, + 0xe7, 0x0c, 0xe9, 0x23, 0xe6, 0x90, 0x2c, 0x1a, 0x1f, 0xfa, 0x80, 0xec, 0xd4, 0xfe, 
0xff, 0xe3, + 0x41, 0x2d, 0xa2, 0x96, 0x90, 0x96, 0xf5, 0x0c, 0xdb, 0x00, 0xfa, 0xfc, 0xa0, 0xe1, 0x07, 0x44, + 0xdc, 0xa6, 0xb3, 0x69, 0x8a, 0x89, 0x6f, 0x6d, 0x50, 0xea, 0x58, 0x70, 0x79, 0xc0, 0x01, 0xb6, + 0xec, 0x77, 0x84, 0x7a, 0x4f, 0xfe, 0x3b, 0x42, 0x3b, 0x30, 0xb8, 0xe3, 0x27, 0x75, 0x96, 0x18, + 0x20, 0xe2, 0x56, 0x16, 0x6e, 0xa1, 0x51, 0x72, 0xe9, 0xdc, 0x6f, 0x4a, 0x06, 0x38, 0xe5, 0x85, + 0xa6, 0x38, 0x63, 0x96, 0x57, 0x9b, 0x4d, 0x78, 0xbc, 0x29, 0x1b, 0x70, 0x8a, 0x43, 0x17, 0x6b, + 0x98, 0xfe, 0x92, 0xd5, 0x66, 0x44, 0x45, 0x52, 0x1b, 0x95, 0xe6, 0x04, 0x45, 0x7e, 0xd7, 0xf3, + 0xa6, 0xc6, 0x03, 0x1b, 0x1c, 0x55, 0x51, 0xd8, 0x81, 0xae, 0x45, 0x61, 0x6f, 0x33, 0x2d, 0x24, + 0xf1, 0x83, 0x36, 0x59, 0x0d, 0x44, 0x36, 0xee, 0xb2, 0x9d, 0x9b, 0xa9, 0x9c, 0x26, 0xb7, 0x2b, + 0xd3, 0xdf, 0x58, 0xe3, 0xa7, 0x85, 0x0f, 0x86, 0x0e, 0x0c, 0x1f, 0xa4, 0x9e, 0x83, 0x61, 0xeb, + 0x9e, 0x83, 0x84, 0xb4, 0xac, 0x78, 0x0e, 0xde, 0x56, 0x36, 0xee, 0x9f, 0x39, 0x80, 0x94, 0x32, + 0xe1, 0xc5, 0x5b, 0xe2, 0xe3, 0x6f, 0xc7, 0x9f, 0xf2, 0xf6, 0x09, 0x07, 0x20, 0x50, 0x5f, 0x9b, + 0xb3, 0x7b, 0x6a, 0x71, 0x9a, 0xe9, 0x00, 0x52, 0x18, 0xd6, 0x78, 0xba, 0xff, 0xdd, 0x49, 0x33, + 0x4b, 0xd3, 0xb9, 0x9f, 0x40, 0x42, 0xd4, 0xae, 0x99, 0x10, 0xb5, 0x6e, 0xd1, 0x03, 0xad, 0xa6, + 0xd1, 0x25, 0x35, 0xea, 0xa7, 0x05, 0x38, 0xa5, 0x23, 0x97, 0xc9, 0x49, 0x3c, 0xec, 0x1d, 0x23, + 0xbf, 0xf1, 0xba, 0xdd, 0xf9, 0x96, 0x45, 0x20, 0x23, 0x2f, 0x97, 0xf6, 0xe3, 0x99, 0x5c, 0xda, + 0x9b, 0xf6, 0x59, 0x1f, 0x9c, 0x50, 0xfb, 0x5f, 0x1d, 0x38, 0x93, 0xe9, 0x71, 0x02, 0x1b, 0x6c, + 0xdb, 0xdc, 0x60, 0x2f, 0x5a, 0x9f, 0x75, 0x97, 0xdd, 0xf5, 0xad, 0x42, 0xc7, 0x6c, 0x99, 0x65, + 0xf2, 0x69, 0x07, 0x8a, 0x89, 0x17, 0x6f, 0xc9, 0xdc, 0xa4, 0x8f, 0x1c, 0xcb, 0x0e, 0x98, 0xa4, + 0xff, 0x0b, 0xe9, 0xac, 0xc6, 0xc7, 0x60, 0x98, 0x73, 0x1f, 0xff, 0x94, 0x03, 0x90, 0x22, 0x3d, + 0x28, 0x95, 0xd5, 0xfd, 0x6e, 0x01, 0xce, 0xe5, 0x6e, 0x23, 0xf4, 0x59, 0xe5, 0x66, 0x72, 0x6c, + 0x67, 0xde, 0x19, 0x8c, 
0x74, 0x6f, 0xd3, 0x88, 0xe1, 0x6d, 0x12, 0x4e, 0xa6, 0x07, 0x65, 0x70, + 0x08, 0x31, 0xad, 0x2d, 0xd6, 0x4f, 0x9c, 0x34, 0x99, 0x53, 0x55, 0x9d, 0xf9, 0x73, 0x78, 0xc5, + 0xc2, 0xfd, 0xa9, 0x96, 0x7f, 0x2e, 0x27, 0x7a, 0x02, 0xb2, 0x62, 0xc7, 0x94, 0x15, 0xd8, 0x7e, + 0x38, 0xb4, 0x8b, 0xb0, 0xf8, 0x28, 0xe4, 0xc5, 0x47, 0x0f, 0x57, 0xb2, 0xce, 0xb8, 0xac, 0x58, + 0x38, 0xf4, 0x65, 0xc5, 0x11, 0x18, 0x7a, 0xd9, 0x6f, 0xa9, 0x50, 0xde, 0xe4, 0xf7, 0x7f, 0x7c, + 0xf1, 0xa1, 0x1f, 0xfc, 0xf8, 0xe2, 0x43, 0x3f, 0xfa, 0xf1, 0xc5, 0x87, 0x3e, 0xb1, 0x7f, 0xd1, + 0xf9, 0xfe, 0xfe, 0x45, 0xe7, 0x07, 0xfb, 0x17, 0x9d, 0x1f, 0xed, 0x5f, 0x74, 0xfe, 0xc3, 0xfe, + 0x45, 0xe7, 0x6f, 0xfd, 0xd1, 0xc5, 0x87, 0x5e, 0x1e, 0x90, 0x13, 0xfb, 0x7f, 0x01, 0x00, 0x00, + 0xff, 0xff, 0xd2, 0xea, 0x98, 0x1a, 0xa3, 0xcf, 0x00, 0x00, } func (m *Amount) Marshal() (dAtA []byte, err error) { @@ -4437,6 +5136,91 @@ func (m *Arguments) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *ArtGCStatus) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ArtGCStatus) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *ArtGCStatus) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + i-- + if m.NotSpecified { + dAtA[i] = 1 + } else { + dAtA[i] = 0 + } + i-- + dAtA[i] = 0x18 + if len(m.PodsRecouped) > 0 { + keysForPodsRecouped := make([]string, 0, len(m.PodsRecouped)) + for k := range m.PodsRecouped { + keysForPodsRecouped = append(keysForPodsRecouped, string(k)) + } + github_com_gogo_protobuf_sortkeys.Strings(keysForPodsRecouped) + for iNdEx := len(keysForPodsRecouped) - 1; iNdEx >= 0; iNdEx-- { + v := m.PodsRecouped[string(keysForPodsRecouped[iNdEx])] + baseI := i + i-- + if v { + dAtA[i] = 1 + } else { 
+ dAtA[i] = 0 + } + i-- + dAtA[i] = 0x10 + i -= len(keysForPodsRecouped[iNdEx]) + copy(dAtA[i:], keysForPodsRecouped[iNdEx]) + i = encodeVarintGenerated(dAtA, i, uint64(len(keysForPodsRecouped[iNdEx]))) + i-- + dAtA[i] = 0xa + i = encodeVarintGenerated(dAtA, i, uint64(baseI-i)) + i-- + dAtA[i] = 0x12 + } + } + if len(m.StrategiesProcessed) > 0 { + keysForStrategiesProcessed := make([]string, 0, len(m.StrategiesProcessed)) + for k := range m.StrategiesProcessed { + keysForStrategiesProcessed = append(keysForStrategiesProcessed, string(k)) + } + github_com_gogo_protobuf_sortkeys.Strings(keysForStrategiesProcessed) + for iNdEx := len(keysForStrategiesProcessed) - 1; iNdEx >= 0; iNdEx-- { + v := m.StrategiesProcessed[ArtifactGCStrategy(keysForStrategiesProcessed[iNdEx])] + baseI := i + i-- + if v { + dAtA[i] = 1 + } else { + dAtA[i] = 0 + } + i-- + dAtA[i] = 0x10 + i -= len(keysForStrategiesProcessed[iNdEx]) + copy(dAtA[i:], keysForStrategiesProcessed[iNdEx]) + i = encodeVarintGenerated(dAtA, i, uint64(len(keysForStrategiesProcessed[iNdEx]))) + i-- + dAtA[i] = 0xa + i = encodeVarintGenerated(dAtA, i, uint64(baseI-i)) + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + func (m *Artifact) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -4457,6 +5241,26 @@ func (m *Artifact) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + i-- + if m.Deleted { + dAtA[i] = 1 + } else { + dAtA[i] = 0 + } + i-- + dAtA[i] = 0x68 + if m.ArtifactGC != nil { + { + size, err := m.ArtifactGC.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x62 + } i -= len(m.FromExpression) copy(dAtA[i:], m.FromExpression) i = encodeVarintGenerated(dAtA, i, uint64(len(m.FromExpression))) @@ -4533,6 +5337,155 @@ func (m *Artifact) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *ArtifactGC) Marshal() 
(dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ArtifactGC) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *ArtifactGC) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + i -= len(m.ServiceAccountName) + copy(dAtA[i:], m.ServiceAccountName) + i = encodeVarintGenerated(dAtA, i, uint64(len(m.ServiceAccountName))) + i-- + dAtA[i] = 0x1a + if m.PodMetadata != nil { + { + size, err := m.PodMetadata.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + i -= len(m.Strategy) + copy(dAtA[i:], m.Strategy) + i = encodeVarintGenerated(dAtA, i, uint64(len(m.Strategy))) + i-- + dAtA[i] = 0xa + return len(dAtA) - i, nil +} + +func (m *ArtifactGCSpec) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ArtifactGCSpec) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *ArtifactGCSpec) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.ArtifactsByNode) > 0 { + keysForArtifactsByNode := make([]string, 0, len(m.ArtifactsByNode)) + for k := range m.ArtifactsByNode { + keysForArtifactsByNode = append(keysForArtifactsByNode, string(k)) + } + github_com_gogo_protobuf_sortkeys.Strings(keysForArtifactsByNode) + for iNdEx := len(keysForArtifactsByNode) - 1; iNdEx >= 0; iNdEx-- { + v := m.ArtifactsByNode[string(keysForArtifactsByNode[iNdEx])] + baseI := i + { + size, err := (&v).MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i 
-= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + i -= len(keysForArtifactsByNode[iNdEx]) + copy(dAtA[i:], keysForArtifactsByNode[iNdEx]) + i = encodeVarintGenerated(dAtA, i, uint64(len(keysForArtifactsByNode[iNdEx]))) + i-- + dAtA[i] = 0xa + i = encodeVarintGenerated(dAtA, i, uint64(baseI-i)) + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + +func (m *ArtifactGCStatus) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ArtifactGCStatus) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *ArtifactGCStatus) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.ArtifactResultsByNode) > 0 { + keysForArtifactResultsByNode := make([]string, 0, len(m.ArtifactResultsByNode)) + for k := range m.ArtifactResultsByNode { + keysForArtifactResultsByNode = append(keysForArtifactResultsByNode, string(k)) + } + github_com_gogo_protobuf_sortkeys.Strings(keysForArtifactResultsByNode) + for iNdEx := len(keysForArtifactResultsByNode) - 1; iNdEx >= 0; iNdEx-- { + v := m.ArtifactResultsByNode[string(keysForArtifactResultsByNode[iNdEx])] + baseI := i + { + size, err := (&v).MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + i -= len(keysForArtifactResultsByNode[iNdEx]) + copy(dAtA[i:], keysForArtifactResultsByNode[iNdEx]) + i = encodeVarintGenerated(dAtA, i, uint64(len(keysForArtifactResultsByNode[iNdEx]))) + i-- + dAtA[i] = 0xa + i = encodeVarintGenerated(dAtA, i, uint64(baseI-i)) + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + func (m *ArtifactLocation) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -4553,6 
+5506,18 @@ func (m *ArtifactLocation) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.Azure != nil { + { + size, err := m.Azure.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x52 + } if m.GCS != nil { { size, err := m.GCS.MarshalToSizedBuffer(dAtA[:i]) @@ -4662,6 +5627,70 @@ func (m *ArtifactLocation) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *ArtifactNodeSpec) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ArtifactNodeSpec) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *ArtifactNodeSpec) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Artifacts) > 0 { + keysForArtifacts := make([]string, 0, len(m.Artifacts)) + for k := range m.Artifacts { + keysForArtifacts = append(keysForArtifacts, string(k)) + } + github_com_gogo_protobuf_sortkeys.Strings(keysForArtifacts) + for iNdEx := len(keysForArtifacts) - 1; iNdEx >= 0; iNdEx-- { + v := m.Artifacts[string(keysForArtifacts[iNdEx])] + baseI := i + { + size, err := (&v).MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + i -= len(keysForArtifacts[iNdEx]) + copy(dAtA[i:], keysForArtifacts[iNdEx]) + i = encodeVarintGenerated(dAtA, i, uint64(len(keysForArtifacts[iNdEx]))) + i-- + dAtA[i] = 0xa + i = encodeVarintGenerated(dAtA, i, uint64(baseI-i)) + i-- + dAtA[i] = 0x12 + } + } + if m.ArchiveLocation != nil { + { + size, err := m.ArchiveLocation.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, 
i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + func (m *ArtifactPaths) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -4715,6 +5744,18 @@ func (m *ArtifactRepository) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.Azure != nil { + { + size, err := m.Azure.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x3a + } if m.GCS != nil { { size, err := m.GCS.MarshalToSizedBuffer(dAtA[:i]) @@ -4879,6 +5920,241 @@ func (m *ArtifactRepositoryRefStatus) MarshalToSizedBuffer(dAtA []byte) (int, er return len(dAtA) - i, nil } +func (m *ArtifactResult) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ArtifactResult) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *ArtifactResult) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Error != nil { + i -= len(*m.Error) + copy(dAtA[i:], *m.Error) + i = encodeVarintGenerated(dAtA, i, uint64(len(*m.Error))) + i-- + dAtA[i] = 0x1a + } + i-- + if m.Success { + dAtA[i] = 1 + } else { + dAtA[i] = 0 + } + i-- + dAtA[i] = 0x10 + i -= len(m.Name) + copy(dAtA[i:], m.Name) + i = encodeVarintGenerated(dAtA, i, uint64(len(m.Name))) + i-- + dAtA[i] = 0xa + return len(dAtA) - i, nil +} + +func (m *ArtifactResultNodeStatus) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ArtifactResultNodeStatus) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m 
*ArtifactResultNodeStatus) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.ArtifactResults) > 0 { + keysForArtifactResults := make([]string, 0, len(m.ArtifactResults)) + for k := range m.ArtifactResults { + keysForArtifactResults = append(keysForArtifactResults, string(k)) + } + github_com_gogo_protobuf_sortkeys.Strings(keysForArtifactResults) + for iNdEx := len(keysForArtifactResults) - 1; iNdEx >= 0; iNdEx-- { + v := m.ArtifactResults[string(keysForArtifactResults[iNdEx])] + baseI := i + { + size, err := (&v).MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + i -= len(keysForArtifactResults[iNdEx]) + copy(dAtA[i:], keysForArtifactResults[iNdEx]) + i = encodeVarintGenerated(dAtA, i, uint64(len(keysForArtifactResults[iNdEx]))) + i-- + dAtA[i] = 0xa + i = encodeVarintGenerated(dAtA, i, uint64(baseI-i)) + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + +func (m *ArtifactSearchQuery) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ArtifactSearchQuery) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *ArtifactSearchQuery) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.NodeTypes) > 0 { + keysForNodeTypes := make([]string, 0, len(m.NodeTypes)) + for k := range m.NodeTypes { + keysForNodeTypes = append(keysForNodeTypes, string(k)) + } + github_com_gogo_protobuf_sortkeys.Strings(keysForNodeTypes) + for iNdEx := len(keysForNodeTypes) - 1; iNdEx >= 0; iNdEx-- { + v := m.NodeTypes[NodeType(keysForNodeTypes[iNdEx])] + baseI := i + i-- + if v { + dAtA[i] = 1 + } else { + dAtA[i] = 0 + } + i-- + dAtA[i] = 0x10 + i -= 
len(keysForNodeTypes[iNdEx]) + copy(dAtA[i:], keysForNodeTypes[iNdEx]) + i = encodeVarintGenerated(dAtA, i, uint64(len(keysForNodeTypes[iNdEx]))) + i-- + dAtA[i] = 0xa + i = encodeVarintGenerated(dAtA, i, uint64(baseI-i)) + i-- + dAtA[i] = 0x32 + } + } + if m.Deleted != nil { + i-- + if *m.Deleted { + dAtA[i] = 1 + } else { + dAtA[i] = 0 + } + i-- + dAtA[i] = 0x28 + } + i -= len(m.NodeId) + copy(dAtA[i:], m.NodeId) + i = encodeVarintGenerated(dAtA, i, uint64(len(m.NodeId))) + i-- + dAtA[i] = 0x22 + i -= len(m.TemplateName) + copy(dAtA[i:], m.TemplateName) + i = encodeVarintGenerated(dAtA, i, uint64(len(m.TemplateName))) + i-- + dAtA[i] = 0x1a + i -= len(m.ArtifactName) + copy(dAtA[i:], m.ArtifactName) + i = encodeVarintGenerated(dAtA, i, uint64(len(m.ArtifactName))) + i-- + dAtA[i] = 0x12 + if len(m.ArtifactGCStrategies) > 0 { + keysForArtifactGCStrategies := make([]string, 0, len(m.ArtifactGCStrategies)) + for k := range m.ArtifactGCStrategies { + keysForArtifactGCStrategies = append(keysForArtifactGCStrategies, string(k)) + } + github_com_gogo_protobuf_sortkeys.Strings(keysForArtifactGCStrategies) + for iNdEx := len(keysForArtifactGCStrategies) - 1; iNdEx >= 0; iNdEx-- { + v := m.ArtifactGCStrategies[ArtifactGCStrategy(keysForArtifactGCStrategies[iNdEx])] + baseI := i + i-- + if v { + dAtA[i] = 1 + } else { + dAtA[i] = 0 + } + i-- + dAtA[i] = 0x10 + i -= len(keysForArtifactGCStrategies[iNdEx]) + copy(dAtA[i:], keysForArtifactGCStrategies[iNdEx]) + i = encodeVarintGenerated(dAtA, i, uint64(len(keysForArtifactGCStrategies[iNdEx]))) + i-- + dAtA[i] = 0xa + i = encodeVarintGenerated(dAtA, i, uint64(baseI-i)) + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + +func (m *ArtifactSearchResult) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ArtifactSearchResult) MarshalTo(dAtA []byte) (int, error) { 
+ size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *ArtifactSearchResult) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + i -= len(m.NodeID) + copy(dAtA[i:], m.NodeID) + i = encodeVarintGenerated(dAtA, i, uint64(len(m.NodeID))) + i-- + dAtA[i] = 0x12 + { + size, err := m.Artifact.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + return len(dAtA) - i, nil +} + func (m *ArtifactoryArtifact) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -5002,6 +6278,135 @@ func (m *ArtifactoryAuth) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *AzureArtifact) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *AzureArtifact) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *AzureArtifact) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + i -= len(m.Blob) + copy(dAtA[i:], m.Blob) + i = encodeVarintGenerated(dAtA, i, uint64(len(m.Blob))) + i-- + dAtA[i] = 0x12 + { + size, err := m.AzureBlobContainer.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + return len(dAtA) - i, nil +} + +func (m *AzureArtifactRepository) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *AzureArtifactRepository) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func 
(m *AzureArtifactRepository) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + i -= len(m.BlobNameFormat) + copy(dAtA[i:], m.BlobNameFormat) + i = encodeVarintGenerated(dAtA, i, uint64(len(m.BlobNameFormat))) + i-- + dAtA[i] = 0x12 + { + size, err := m.AzureBlobContainer.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + return len(dAtA) - i, nil +} + +func (m *AzureBlobContainer) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *AzureBlobContainer) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *AzureBlobContainer) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + i-- + if m.UseSDKCreds { + dAtA[i] = 1 + } else { + dAtA[i] = 0 + } + i-- + dAtA[i] = 0x20 + if m.AccountKeySecret != nil { + { + size, err := m.AccountKeySecret.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x1a + } + i -= len(m.Container) + copy(dAtA[i:], m.Container) + i = encodeVarintGenerated(dAtA, i, uint64(len(m.Container))) + i-- + dAtA[i] = 0x12 + i -= len(m.Endpoint) + copy(dAtA[i:], m.Endpoint) + i = encodeVarintGenerated(dAtA, i, uint64(len(m.Endpoint))) + i-- + dAtA[i] = 0xa + return len(dAtA) - i, nil +} + func (m *Backoff) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -5047,6 +6452,53 @@ func (m *Backoff) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *BasicAuth) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := 
m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *BasicAuth) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *BasicAuth) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.PasswordSecret != nil { + { + size, err := m.PasswordSecret.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + if m.UsernameSecret != nil { + { + size, err := m.UsernameSecret.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + func (m *Cache) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -5082,6 +6534,53 @@ func (m *Cache) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *ClientCertAuth) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ClientCertAuth) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *ClientCertAuth) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.ClientKeySecret != nil { + { + size, err := m.ClientKeySecret.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + if m.ClientCertSecret != nil { + { + size, err := m.ClientCertSecret.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + 
return len(dAtA) - i, nil +} + func (m *ClusterWorkflowTemplate) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -6226,6 +7725,19 @@ func (m *GitArtifact) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + i -= len(m.Branch) + copy(dAtA[i:], m.Branch) + i = encodeVarintGenerated(dAtA, i, uint64(len(m.Branch))) + i-- + dAtA[i] = 0x5a + i-- + if m.SingleBranch { + dAtA[i] = 1 + } else { + dAtA[i] = 0 + } + i-- + dAtA[i] = 0x50 i-- if m.DisableSubmodules { dAtA[i] = 1 @@ -6538,6 +8050,18 @@ func (m *HTTP) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.BodyFrom != nil { + { + size, err := m.BodyFrom.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x42 + } i-- if m.InsecureSkipVerify { dAtA[i] = 1 @@ -6608,6 +8132,18 @@ func (m *HTTPArtifact) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.Auth != nil { + { + size, err := m.Auth.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x1a + } if len(m.Headers) > 0 { for iNdEx := len(m.Headers) - 1; iNdEx >= 0; iNdEx-- { { @@ -6630,6 +8166,89 @@ func (m *HTTPArtifact) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *HTTPAuth) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *HTTPAuth) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *HTTPAuth) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + { + size, err := m.BasicAuth.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size 
+ i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x1a + { + size, err := m.OAuth2.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + { + size, err := m.ClientCert.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + return len(dAtA) - i, nil +} + +func (m *HTTPBodySource) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *HTTPBodySource) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *HTTPBodySource) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Bytes != nil { + i -= len(m.Bytes) + copy(dAtA[i:], m.Bytes) + i = encodeVarintGenerated(dAtA, i, uint64(len(m.Bytes))) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + func (m *HTTPHeader) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -7051,6 +8670,41 @@ func (m *Link) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *ManifestFrom) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ManifestFrom) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *ManifestFrom) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Artifact != nil { + { + size, err := m.Artifact.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = 
encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + func (m *MemoizationStatus) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -7725,6 +9379,121 @@ func (m *NoneStrategy) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *OAuth2Auth) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *OAuth2Auth) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *OAuth2Auth) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.EndpointParams) > 0 { + for iNdEx := len(m.EndpointParams) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.EndpointParams[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x32 + } + } + if len(m.Scopes) > 0 { + for iNdEx := len(m.Scopes) - 1; iNdEx >= 0; iNdEx-- { + i -= len(m.Scopes[iNdEx]) + copy(dAtA[i:], m.Scopes[iNdEx]) + i = encodeVarintGenerated(dAtA, i, uint64(len(m.Scopes[iNdEx]))) + i-- + dAtA[i] = 0x2a + } + } + if m.TokenURLSecret != nil { + { + size, err := m.TokenURLSecret.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x1a + } + if m.ClientSecretSecret != nil { + { + size, err := m.ClientSecretSecret.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + if m.ClientIDSecret != nil { + { + size, err := m.ClientIDSecret.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = 
encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m *OAuth2EndpointParam) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *OAuth2EndpointParam) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *OAuth2EndpointParam) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + i -= len(m.Value) + copy(dAtA[i:], m.Value) + i = encodeVarintGenerated(dAtA, i, uint64(len(m.Value))) + i-- + dAtA[i] = 0x12 + i -= len(m.Key) + copy(dAtA[i:], m.Key) + i = encodeVarintGenerated(dAtA, i, uint64(len(m.Key))) + i-- + dAtA[i] = 0xa + return len(dAtA) - i, nil +} + func (m *OSSArtifact) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -8321,6 +10090,18 @@ func (m *ResourceTemplate) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.ManifestFrom != nil { + { + size, err := m.ManifestFrom.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x42 + } if len(m.Flags) > 0 { for iNdEx := len(m.Flags) - 1; iNdEx >= 0; iNdEx-- { i -= len(m.Flags[iNdEx]) @@ -10168,7 +11949,7 @@ func (m *Workflow) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } -func (m *WorkflowEventBinding) Marshal() (dAtA []byte, err error) { +func (m *WorkflowArtifactGCTask) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) @@ -10178,16 +11959,26 @@ func (m *WorkflowEventBinding) Marshal() (dAtA []byte, err error) { return dAtA[:n], nil } -func (m *WorkflowEventBinding) MarshalTo(dAtA []byte) (int, error) { +func (m 
*WorkflowArtifactGCTask) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } -func (m *WorkflowEventBinding) MarshalToSizedBuffer(dAtA []byte) (int, error) { +func (m *WorkflowArtifactGCTask) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l + { + size, err := m.Status.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x1a { size, err := m.Spec.MarshalToSizedBuffer(dAtA[:i]) if err != nil { @@ -10211,7 +12002,7 @@ func (m *WorkflowEventBinding) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } -func (m *WorkflowEventBindingList) Marshal() (dAtA []byte, err error) { +func (m *WorkflowArtifactGCTaskList) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) @@ -10221,12 +12012,12 @@ func (m *WorkflowEventBindingList) Marshal() (dAtA []byte, err error) { return dAtA[:n], nil } -func (m *WorkflowEventBindingList) MarshalTo(dAtA []byte) (int, error) { +func (m *WorkflowArtifactGCTaskList) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } -func (m *WorkflowEventBindingList) MarshalToSizedBuffer(dAtA []byte) (int, error) { +func (m *WorkflowArtifactGCTaskList) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int @@ -10258,7 +12049,7 @@ func (m *WorkflowEventBindingList) MarshalToSizedBuffer(dAtA []byte) (int, error return len(dAtA) - i, nil } -func (m *WorkflowEventBindingSpec) Marshal() (dAtA []byte, err error) { +func (m *WorkflowEventBinding) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) @@ -10268,30 +12059,28 @@ func (m *WorkflowEventBindingSpec) Marshal() (dAtA []byte, err error) { return dAtA[:n], nil } -func (m 
*WorkflowEventBindingSpec) MarshalTo(dAtA []byte) (int, error) { +func (m *WorkflowEventBinding) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } -func (m *WorkflowEventBindingSpec) MarshalToSizedBuffer(dAtA []byte) (int, error) { +func (m *WorkflowEventBinding) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l - if m.Submit != nil { - { - size, err := m.Submit.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintGenerated(dAtA, i, uint64(size)) + { + size, err := m.Spec.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0x12 + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) } + i-- + dAtA[i] = 0x12 { - size, err := m.Event.MarshalToSizedBuffer(dAtA[:i]) + size, err := m.ObjectMeta.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } @@ -10303,7 +12092,7 @@ func (m *WorkflowEventBindingSpec) MarshalToSizedBuffer(dAtA []byte) (int, error return len(dAtA) - i, nil } -func (m *WorkflowList) Marshal() (dAtA []byte, err error) { +func (m *WorkflowEventBindingList) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) @@ -10313,12 +12102,104 @@ func (m *WorkflowList) Marshal() (dAtA []byte, err error) { return dAtA[:n], nil } -func (m *WorkflowList) MarshalTo(dAtA []byte) (int, error) { +func (m *WorkflowEventBindingList) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } -func (m *WorkflowList) MarshalToSizedBuffer(dAtA []byte) (int, error) { +func (m *WorkflowEventBindingList) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Items) > 0 { + for iNdEx := len(m.Items) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.Items[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i 
= encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + } + { + size, err := m.ListMeta.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + return len(dAtA) - i, nil +} + +func (m *WorkflowEventBindingSpec) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *WorkflowEventBindingSpec) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *WorkflowEventBindingSpec) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Submit != nil { + { + size, err := m.Submit.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + { + size, err := m.Event.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + return len(dAtA) - i, nil +} + +func (m *WorkflowList) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *WorkflowList) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *WorkflowList) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int @@ -10470,6 +12351,20 @@ func (m *WorkflowSpec) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.ArtifactGC != nil { + { + size, err := m.ArtifactGC.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, 
uint64(size)) + } + i-- + dAtA[i] = 0x2 + i-- + dAtA[i] = 0xda + } if m.WorkflowMetadata != nil { { size, err := m.WorkflowMetadata.MarshalToSizedBuffer(dAtA[:i]) @@ -10986,6 +12881,20 @@ func (m *WorkflowStatus) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.ArtifactGCStatus != nil { + { + size, err := m.ArtifactGCStatus.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenerated(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x1 + i-- + dAtA[i] = 0x9a + } if m.ArtifactRepositoryRef != nil { { size, err := m.ArtifactRepositoryRef.MarshalToSizedBuffer(dAtA[:i]) @@ -11855,6 +13764,32 @@ func (m *Arguments) Size() (n int) { return n } +func (m *ArtGCStatus) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.StrategiesProcessed) > 0 { + for k, v := range m.StrategiesProcessed { + _ = k + _ = v + mapEntrySize := 1 + len(k) + sovGenerated(uint64(len(k))) + 1 + 1 + n += mapEntrySize + 1 + sovGenerated(uint64(mapEntrySize)) + } + } + if len(m.PodsRecouped) > 0 { + for k, v := range m.PodsRecouped { + _ = k + _ = v + mapEntrySize := 1 + len(k) + sovGenerated(uint64(len(k))) + 1 + 1 + n += mapEntrySize + 1 + sovGenerated(uint64(mapEntrySize)) + } + } + n += 2 + return n +} + func (m *Artifact) Size() (n int) { if m == nil { return 0 @@ -11884,6 +13819,64 @@ func (m *Artifact) Size() (n int) { n += 2 l = len(m.FromExpression) n += 1 + l + sovGenerated(uint64(l)) + if m.ArtifactGC != nil { + l = m.ArtifactGC.Size() + n += 1 + l + sovGenerated(uint64(l)) + } + n += 2 + return n +} + +func (m *ArtifactGC) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.Strategy) + n += 1 + l + sovGenerated(uint64(l)) + if m.PodMetadata != nil { + l = m.PodMetadata.Size() + n += 1 + l + sovGenerated(uint64(l)) + } + l = len(m.ServiceAccountName) + n += 1 + l + sovGenerated(uint64(l)) + return n +} + +func (m *ArtifactGCSpec) Size() (n int) { + if m == nil { + 
return 0 + } + var l int + _ = l + if len(m.ArtifactsByNode) > 0 { + for k, v := range m.ArtifactsByNode { + _ = k + _ = v + l = v.Size() + mapEntrySize := 1 + len(k) + sovGenerated(uint64(len(k))) + 1 + l + sovGenerated(uint64(l)) + n += mapEntrySize + 1 + sovGenerated(uint64(mapEntrySize)) + } + } + return n +} + +func (m *ArtifactGCStatus) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.ArtifactResultsByNode) > 0 { + for k, v := range m.ArtifactResultsByNode { + _ = k + _ = v + l = v.Size() + mapEntrySize := 1 + len(k) + sovGenerated(uint64(len(k))) + 1 + l + sovGenerated(uint64(l)) + n += mapEntrySize + 1 + sovGenerated(uint64(mapEntrySize)) + } + } return n } @@ -11928,6 +13921,32 @@ func (m *ArtifactLocation) Size() (n int) { l = m.GCS.Size() n += 1 + l + sovGenerated(uint64(l)) } + if m.Azure != nil { + l = m.Azure.Size() + n += 1 + l + sovGenerated(uint64(l)) + } + return n +} + +func (m *ArtifactNodeSpec) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.ArchiveLocation != nil { + l = m.ArchiveLocation.Size() + n += 1 + l + sovGenerated(uint64(l)) + } + if len(m.Artifacts) > 0 { + for k, v := range m.Artifacts { + _ = k + _ = v + l = v.Size() + mapEntrySize := 1 + len(k) + sovGenerated(uint64(len(k))) + 1 + l + sovGenerated(uint64(l)) + n += mapEntrySize + 1 + sovGenerated(uint64(mapEntrySize)) + } + } return n } @@ -11971,6 +13990,10 @@ func (m *ArtifactRepository) Size() (n int) { l = m.GCS.Size() n += 1 + l + sovGenerated(uint64(l)) } + if m.Azure != nil { + l = m.Azure.Size() + n += 1 + l + sovGenerated(uint64(l)) + } return n } @@ -12005,6 +14028,87 @@ func (m *ArtifactRepositoryRefStatus) Size() (n int) { return n } +func (m *ArtifactResult) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.Name) + n += 1 + l + sovGenerated(uint64(l)) + n += 2 + if m.Error != nil { + l = len(*m.Error) + n += 1 + l + sovGenerated(uint64(l)) + } + return n +} + +func (m 
*ArtifactResultNodeStatus) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.ArtifactResults) > 0 { + for k, v := range m.ArtifactResults { + _ = k + _ = v + l = v.Size() + mapEntrySize := 1 + len(k) + sovGenerated(uint64(len(k))) + 1 + l + sovGenerated(uint64(l)) + n += mapEntrySize + 1 + sovGenerated(uint64(mapEntrySize)) + } + } + return n +} + +func (m *ArtifactSearchQuery) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.ArtifactGCStrategies) > 0 { + for k, v := range m.ArtifactGCStrategies { + _ = k + _ = v + mapEntrySize := 1 + len(k) + sovGenerated(uint64(len(k))) + 1 + 1 + n += mapEntrySize + 1 + sovGenerated(uint64(mapEntrySize)) + } + } + l = len(m.ArtifactName) + n += 1 + l + sovGenerated(uint64(l)) + l = len(m.TemplateName) + n += 1 + l + sovGenerated(uint64(l)) + l = len(m.NodeId) + n += 1 + l + sovGenerated(uint64(l)) + if m.Deleted != nil { + n += 2 + } + if len(m.NodeTypes) > 0 { + for k, v := range m.NodeTypes { + _ = k + _ = v + mapEntrySize := 1 + len(k) + sovGenerated(uint64(len(k))) + 1 + 1 + n += mapEntrySize + 1 + sovGenerated(uint64(mapEntrySize)) + } + } + return n +} + +func (m *ArtifactSearchResult) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = m.Artifact.Size() + n += 1 + l + sovGenerated(uint64(l)) + l = len(m.NodeID) + n += 1 + l + sovGenerated(uint64(l)) + return n +} + func (m *ArtifactoryArtifact) Size() (n int) { if m == nil { return 0 @@ -12048,6 +14152,50 @@ func (m *ArtifactoryAuth) Size() (n int) { return n } +func (m *AzureArtifact) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = m.AzureBlobContainer.Size() + n += 1 + l + sovGenerated(uint64(l)) + l = len(m.Blob) + n += 1 + l + sovGenerated(uint64(l)) + return n +} + +func (m *AzureArtifactRepository) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = m.AzureBlobContainer.Size() + n += 1 + l + sovGenerated(uint64(l)) + l = 
len(m.BlobNameFormat) + n += 1 + l + sovGenerated(uint64(l)) + return n +} + +func (m *AzureBlobContainer) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.Endpoint) + n += 1 + l + sovGenerated(uint64(l)) + l = len(m.Container) + n += 1 + l + sovGenerated(uint64(l)) + if m.AccountKeySecret != nil { + l = m.AccountKeySecret.Size() + n += 1 + l + sovGenerated(uint64(l)) + } + n += 2 + return n +} + func (m *Backoff) Size() (n int) { if m == nil { return 0 @@ -12065,6 +14213,23 @@ func (m *Backoff) Size() (n int) { return n } +func (m *BasicAuth) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.UsernameSecret != nil { + l = m.UsernameSecret.Size() + n += 1 + l + sovGenerated(uint64(l)) + } + if m.PasswordSecret != nil { + l = m.PasswordSecret.Size() + n += 1 + l + sovGenerated(uint64(l)) + } + return n +} + func (m *Cache) Size() (n int) { if m == nil { return 0 @@ -12078,6 +14243,23 @@ func (m *Cache) Size() (n int) { return n } +func (m *ClientCertAuth) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.ClientCertSecret != nil { + l = m.ClientCertSecret.Size() + n += 1 + l + sovGenerated(uint64(l)) + } + if m.ClientKeySecret != nil { + l = m.ClientKeySecret.Size() + n += 1 + l + sovGenerated(uint64(l)) + } + return n +} + func (m *ClusterWorkflowTemplate) Size() (n int) { if m == nil { return 0 @@ -12520,6 +14702,9 @@ func (m *GitArtifact) Size() (n int) { } n += 2 n += 2 + n += 2 + l = len(m.Branch) + n += 1 + l + sovGenerated(uint64(l)) return n } @@ -12621,6 +14806,10 @@ func (m *HTTP) Size() (n int) { l = len(m.SuccessCondition) n += 1 + l + sovGenerated(uint64(l)) n += 2 + if m.BodyFrom != nil { + l = m.BodyFrom.Size() + n += 1 + l + sovGenerated(uint64(l)) + } return n } @@ -12638,6 +14827,38 @@ func (m *HTTPArtifact) Size() (n int) { n += 1 + l + sovGenerated(uint64(l)) } } + if m.Auth != nil { + l = m.Auth.Size() + n += 1 + l + sovGenerated(uint64(l)) + } + return n +} + +func 
(m *HTTPAuth) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = m.ClientCert.Size() + n += 1 + l + sovGenerated(uint64(l)) + l = m.OAuth2.Size() + n += 1 + l + sovGenerated(uint64(l)) + l = m.BasicAuth.Size() + n += 1 + l + sovGenerated(uint64(l)) + return n +} + +func (m *HTTPBodySource) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Bytes != nil { + l = len(m.Bytes) + n += 1 + l + sovGenerated(uint64(l)) + } return n } @@ -12810,6 +15031,19 @@ func (m *Link) Size() (n int) { return n } +func (m *ManifestFrom) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Artifact != nil { + l = m.Artifact.Size() + n += 1 + l + sovGenerated(uint64(l)) + } + return n +} + func (m *MemoizationStatus) Size() (n int) { if m == nil { return 0 @@ -13059,6 +15293,52 @@ func (m *NoneStrategy) Size() (n int) { return n } +func (m *OAuth2Auth) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.ClientIDSecret != nil { + l = m.ClientIDSecret.Size() + n += 1 + l + sovGenerated(uint64(l)) + } + if m.ClientSecretSecret != nil { + l = m.ClientSecretSecret.Size() + n += 1 + l + sovGenerated(uint64(l)) + } + if m.TokenURLSecret != nil { + l = m.TokenURLSecret.Size() + n += 1 + l + sovGenerated(uint64(l)) + } + if len(m.Scopes) > 0 { + for _, s := range m.Scopes { + l = len(s) + n += 1 + l + sovGenerated(uint64(l)) + } + } + if len(m.EndpointParams) > 0 { + for _, e := range m.EndpointParams { + l = e.Size() + n += 1 + l + sovGenerated(uint64(l)) + } + } + return n +} + +func (m *OAuth2EndpointParam) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.Key) + n += 1 + l + sovGenerated(uint64(l)) + l = len(m.Value) + n += 1 + l + sovGenerated(uint64(l)) + return n +} + func (m *OSSArtifact) Size() (n int) { if m == nil { return 0 @@ -13305,6 +15585,10 @@ func (m *ResourceTemplate) Size() (n int) { n += 1 + l + sovGenerated(uint64(l)) } } + if m.ManifestFrom != nil { + l 
= m.ManifestFrom.Size() + n += 1 + l + sovGenerated(uint64(l)) + } return n } @@ -13945,6 +16229,38 @@ func (m *Workflow) Size() (n int) { return n } +func (m *WorkflowArtifactGCTask) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = m.ObjectMeta.Size() + n += 1 + l + sovGenerated(uint64(l)) + l = m.Spec.Size() + n += 1 + l + sovGenerated(uint64(l)) + l = m.Status.Size() + n += 1 + l + sovGenerated(uint64(l)) + return n +} + +func (m *WorkflowArtifactGCTaskList) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = m.ListMeta.Size() + n += 1 + l + sovGenerated(uint64(l)) + if len(m.Items) > 0 { + for _, e := range m.Items { + l = e.Size() + n += 1 + l + sovGenerated(uint64(l)) + } + } + return n +} + func (m *WorkflowEventBinding) Size() (n int) { if m == nil { return 0 @@ -14208,6 +16524,10 @@ func (m *WorkflowSpec) Size() (n int) { l = m.WorkflowMetadata.Size() n += 2 + l + sovGenerated(uint64(l)) } + if m.ArtifactGC != nil { + l = m.ArtifactGC.Size() + n += 2 + l + sovGenerated(uint64(l)) + } return n } @@ -14286,6 +16606,10 @@ func (m *WorkflowStatus) Size() (n int) { l = m.ArtifactRepositoryRef.Size() n += 2 + l + sovGenerated(uint64(l)) } + if m.ArtifactGCStatus != nil { + l = m.ArtifactGCStatus.Size() + n += 2 + l + sovGenerated(uint64(l)) + } return n } @@ -14539,6 +16863,38 @@ func (this *Arguments) String() string { }, "") return s } +func (this *ArtGCStatus) String() string { + if this == nil { + return "nil" + } + keysForStrategiesProcessed := make([]string, 0, len(this.StrategiesProcessed)) + for k := range this.StrategiesProcessed { + keysForStrategiesProcessed = append(keysForStrategiesProcessed, string(k)) + } + github_com_gogo_protobuf_sortkeys.Strings(keysForStrategiesProcessed) + mapStringForStrategiesProcessed := "map[ArtifactGCStrategy]bool{" + for _, k := range keysForStrategiesProcessed { + mapStringForStrategiesProcessed += fmt.Sprintf("%v: %v,", k, this.StrategiesProcessed[ArtifactGCStrategy(k)]) 
+ } + mapStringForStrategiesProcessed += "}" + keysForPodsRecouped := make([]string, 0, len(this.PodsRecouped)) + for k := range this.PodsRecouped { + keysForPodsRecouped = append(keysForPodsRecouped, k) + } + github_com_gogo_protobuf_sortkeys.Strings(keysForPodsRecouped) + mapStringForPodsRecouped := "map[string]bool{" + for _, k := range keysForPodsRecouped { + mapStringForPodsRecouped += fmt.Sprintf("%v: %v,", k, this.PodsRecouped[k]) + } + mapStringForPodsRecouped += "}" + s := strings.Join([]string{`&ArtGCStatus{`, + `StrategiesProcessed:` + mapStringForStrategiesProcessed + `,`, + `PodsRecouped:` + mapStringForPodsRecouped + `,`, + `NotSpecified:` + fmt.Sprintf("%v", this.NotSpecified) + `,`, + `}`, + }, "") + return s +} func (this *Artifact) String() string { if this == nil { return "nil" @@ -14555,6 +16911,60 @@ func (this *Artifact) String() string { `SubPath:` + fmt.Sprintf("%v", this.SubPath) + `,`, `RecurseMode:` + fmt.Sprintf("%v", this.RecurseMode) + `,`, `FromExpression:` + fmt.Sprintf("%v", this.FromExpression) + `,`, + `ArtifactGC:` + strings.Replace(this.ArtifactGC.String(), "ArtifactGC", "ArtifactGC", 1) + `,`, + `Deleted:` + fmt.Sprintf("%v", this.Deleted) + `,`, + `}`, + }, "") + return s +} +func (this *ArtifactGC) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&ArtifactGC{`, + `Strategy:` + fmt.Sprintf("%v", this.Strategy) + `,`, + `PodMetadata:` + strings.Replace(this.PodMetadata.String(), "Metadata", "Metadata", 1) + `,`, + `ServiceAccountName:` + fmt.Sprintf("%v", this.ServiceAccountName) + `,`, + `}`, + }, "") + return s +} +func (this *ArtifactGCSpec) String() string { + if this == nil { + return "nil" + } + keysForArtifactsByNode := make([]string, 0, len(this.ArtifactsByNode)) + for k := range this.ArtifactsByNode { + keysForArtifactsByNode = append(keysForArtifactsByNode, k) + } + github_com_gogo_protobuf_sortkeys.Strings(keysForArtifactsByNode) + mapStringForArtifactsByNode := 
"map[string]ArtifactNodeSpec{" + for _, k := range keysForArtifactsByNode { + mapStringForArtifactsByNode += fmt.Sprintf("%v: %v,", k, this.ArtifactsByNode[k]) + } + mapStringForArtifactsByNode += "}" + s := strings.Join([]string{`&ArtifactGCSpec{`, + `ArtifactsByNode:` + mapStringForArtifactsByNode + `,`, + `}`, + }, "") + return s +} +func (this *ArtifactGCStatus) String() string { + if this == nil { + return "nil" + } + keysForArtifactResultsByNode := make([]string, 0, len(this.ArtifactResultsByNode)) + for k := range this.ArtifactResultsByNode { + keysForArtifactResultsByNode = append(keysForArtifactResultsByNode, k) + } + github_com_gogo_protobuf_sortkeys.Strings(keysForArtifactResultsByNode) + mapStringForArtifactResultsByNode := "map[string]ArtifactResultNodeStatus{" + for _, k := range keysForArtifactResultsByNode { + mapStringForArtifactResultsByNode += fmt.Sprintf("%v: %v,", k, this.ArtifactResultsByNode[k]) + } + mapStringForArtifactResultsByNode += "}" + s := strings.Join([]string{`&ArtifactGCStatus{`, + `ArtifactResultsByNode:` + mapStringForArtifactResultsByNode + `,`, `}`, }, "") return s @@ -14573,6 +16983,28 @@ func (this *ArtifactLocation) String() string { `Raw:` + strings.Replace(this.Raw.String(), "RawArtifact", "RawArtifact", 1) + `,`, `OSS:` + strings.Replace(this.OSS.String(), "OSSArtifact", "OSSArtifact", 1) + `,`, `GCS:` + strings.Replace(this.GCS.String(), "GCSArtifact", "GCSArtifact", 1) + `,`, + `Azure:` + strings.Replace(this.Azure.String(), "AzureArtifact", "AzureArtifact", 1) + `,`, + `}`, + }, "") + return s +} +func (this *ArtifactNodeSpec) String() string { + if this == nil { + return "nil" + } + keysForArtifacts := make([]string, 0, len(this.Artifacts)) + for k := range this.Artifacts { + keysForArtifacts = append(keysForArtifacts, k) + } + github_com_gogo_protobuf_sortkeys.Strings(keysForArtifacts) + mapStringForArtifacts := "map[string]Artifact{" + for _, k := range keysForArtifacts { + mapStringForArtifacts += fmt.Sprintf("%v: 
%v,", k, this.Artifacts[k]) + } + mapStringForArtifacts += "}" + s := strings.Join([]string{`&ArtifactNodeSpec{`, + `ArchiveLocation:` + strings.Replace(this.ArchiveLocation.String(), "ArtifactLocation", "ArtifactLocation", 1) + `,`, + `Artifacts:` + mapStringForArtifacts + `,`, `}`, }, "") return s @@ -14598,6 +17030,85 @@ func (this *ArtifactRepository) String() string { `HDFS:` + strings.Replace(this.HDFS.String(), "HDFSArtifactRepository", "HDFSArtifactRepository", 1) + `,`, `OSS:` + strings.Replace(this.OSS.String(), "OSSArtifactRepository", "OSSArtifactRepository", 1) + `,`, `GCS:` + strings.Replace(this.GCS.String(), "GCSArtifactRepository", "GCSArtifactRepository", 1) + `,`, + `Azure:` + strings.Replace(this.Azure.String(), "AzureArtifactRepository", "AzureArtifactRepository", 1) + `,`, + `}`, + }, "") + return s +} +func (this *ArtifactResult) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&ArtifactResult{`, + `Name:` + fmt.Sprintf("%v", this.Name) + `,`, + `Success:` + fmt.Sprintf("%v", this.Success) + `,`, + `Error:` + valueToStringGenerated(this.Error) + `,`, + `}`, + }, "") + return s +} +func (this *ArtifactResultNodeStatus) String() string { + if this == nil { + return "nil" + } + keysForArtifactResults := make([]string, 0, len(this.ArtifactResults)) + for k := range this.ArtifactResults { + keysForArtifactResults = append(keysForArtifactResults, k) + } + github_com_gogo_protobuf_sortkeys.Strings(keysForArtifactResults) + mapStringForArtifactResults := "map[string]ArtifactResult{" + for _, k := range keysForArtifactResults { + mapStringForArtifactResults += fmt.Sprintf("%v: %v,", k, this.ArtifactResults[k]) + } + mapStringForArtifactResults += "}" + s := strings.Join([]string{`&ArtifactResultNodeStatus{`, + `ArtifactResults:` + mapStringForArtifactResults + `,`, + `}`, + }, "") + return s +} +func (this *ArtifactSearchQuery) String() string { + if this == nil { + return "nil" + } + keysForArtifactGCStrategies := 
make([]string, 0, len(this.ArtifactGCStrategies)) + for k := range this.ArtifactGCStrategies { + keysForArtifactGCStrategies = append(keysForArtifactGCStrategies, string(k)) + } + github_com_gogo_protobuf_sortkeys.Strings(keysForArtifactGCStrategies) + mapStringForArtifactGCStrategies := "map[ArtifactGCStrategy]bool{" + for _, k := range keysForArtifactGCStrategies { + mapStringForArtifactGCStrategies += fmt.Sprintf("%v: %v,", k, this.ArtifactGCStrategies[ArtifactGCStrategy(k)]) + } + mapStringForArtifactGCStrategies += "}" + keysForNodeTypes := make([]string, 0, len(this.NodeTypes)) + for k := range this.NodeTypes { + keysForNodeTypes = append(keysForNodeTypes, string(k)) + } + github_com_gogo_protobuf_sortkeys.Strings(keysForNodeTypes) + mapStringForNodeTypes := "map[NodeType]bool{" + for _, k := range keysForNodeTypes { + mapStringForNodeTypes += fmt.Sprintf("%v: %v,", k, this.NodeTypes[NodeType(k)]) + } + mapStringForNodeTypes += "}" + s := strings.Join([]string{`&ArtifactSearchQuery{`, + `ArtifactGCStrategies:` + mapStringForArtifactGCStrategies + `,`, + `ArtifactName:` + fmt.Sprintf("%v", this.ArtifactName) + `,`, + `TemplateName:` + fmt.Sprintf("%v", this.TemplateName) + `,`, + `NodeId:` + fmt.Sprintf("%v", this.NodeId) + `,`, + `Deleted:` + valueToStringGenerated(this.Deleted) + `,`, + `NodeTypes:` + mapStringForNodeTypes + `,`, + `}`, + }, "") + return s +} +func (this *ArtifactSearchResult) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&ArtifactSearchResult{`, + `Artifact:` + strings.Replace(strings.Replace(this.Artifact.String(), "Artifact", "Artifact", 1), `&`, ``, 1) + `,`, + `NodeID:` + fmt.Sprintf("%v", this.NodeID) + `,`, `}`, }, "") return s @@ -14635,6 +17146,41 @@ func (this *ArtifactoryAuth) String() string { }, "") return s } +func (this *AzureArtifact) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&AzureArtifact{`, + `AzureBlobContainer:` + 
strings.Replace(strings.Replace(this.AzureBlobContainer.String(), "AzureBlobContainer", "AzureBlobContainer", 1), `&`, ``, 1) + `,`, + `Blob:` + fmt.Sprintf("%v", this.Blob) + `,`, + `}`, + }, "") + return s +} +func (this *AzureArtifactRepository) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&AzureArtifactRepository{`, + `AzureBlobContainer:` + strings.Replace(strings.Replace(this.AzureBlobContainer.String(), "AzureBlobContainer", "AzureBlobContainer", 1), `&`, ``, 1) + `,`, + `BlobNameFormat:` + fmt.Sprintf("%v", this.BlobNameFormat) + `,`, + `}`, + }, "") + return s +} +func (this *AzureBlobContainer) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&AzureBlobContainer{`, + `Endpoint:` + fmt.Sprintf("%v", this.Endpoint) + `,`, + `Container:` + fmt.Sprintf("%v", this.Container) + `,`, + `AccountKeySecret:` + strings.Replace(fmt.Sprintf("%v", this.AccountKeySecret), "SecretKeySelector", "v1.SecretKeySelector", 1) + `,`, + `UseSDKCreds:` + fmt.Sprintf("%v", this.UseSDKCreds) + `,`, + `}`, + }, "") + return s +} func (this *Backoff) String() string { if this == nil { return "nil" @@ -14647,6 +17193,17 @@ func (this *Backoff) String() string { }, "") return s } +func (this *BasicAuth) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&BasicAuth{`, + `UsernameSecret:` + strings.Replace(fmt.Sprintf("%v", this.UsernameSecret), "SecretKeySelector", "v1.SecretKeySelector", 1) + `,`, + `PasswordSecret:` + strings.Replace(fmt.Sprintf("%v", this.PasswordSecret), "SecretKeySelector", "v1.SecretKeySelector", 1) + `,`, + `}`, + }, "") + return s +} func (this *Cache) String() string { if this == nil { return "nil" @@ -14657,6 +17214,17 @@ func (this *Cache) String() string { }, "") return s } +func (this *ClientCertAuth) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&ClientCertAuth{`, + `ClientCertSecret:` + 
strings.Replace(fmt.Sprintf("%v", this.ClientCertSecret), "SecretKeySelector", "v1.SecretKeySelector", 1) + `,`, + `ClientKeySecret:` + strings.Replace(fmt.Sprintf("%v", this.ClientKeySecret), "SecretKeySelector", "v1.SecretKeySelector", 1) + `,`, + `}`, + }, "") + return s +} func (this *ClusterWorkflowTemplate) String() string { if this == nil { return "nil" @@ -14998,6 +17566,8 @@ func (this *GitArtifact) String() string { `SSHPrivateKeySecret:` + strings.Replace(fmt.Sprintf("%v", this.SSHPrivateKeySecret), "SecretKeySelector", "v1.SecretKeySelector", 1) + `,`, `InsecureIgnoreHostKey:` + fmt.Sprintf("%v", this.InsecureIgnoreHostKey) + `,`, `DisableSubmodules:` + fmt.Sprintf("%v", this.DisableSubmodules) + `,`, + `SingleBranch:` + fmt.Sprintf("%v", this.SingleBranch) + `,`, + `Branch:` + fmt.Sprintf("%v", this.Branch) + `,`, `}`, }, "") return s @@ -15070,6 +17640,7 @@ func (this *HTTP) String() string { `Body:` + fmt.Sprintf("%v", this.Body) + `,`, `SuccessCondition:` + fmt.Sprintf("%v", this.SuccessCondition) + `,`, `InsecureSkipVerify:` + fmt.Sprintf("%v", this.InsecureSkipVerify) + `,`, + `BodyFrom:` + strings.Replace(this.BodyFrom.String(), "HTTPBodySource", "HTTPBodySource", 1) + `,`, `}`, }, "") return s @@ -15086,6 +17657,29 @@ func (this *HTTPArtifact) String() string { s := strings.Join([]string{`&HTTPArtifact{`, `URL:` + fmt.Sprintf("%v", this.URL) + `,`, `Headers:` + repeatedStringForHeaders + `,`, + `Auth:` + strings.Replace(this.Auth.String(), "HTTPAuth", "HTTPAuth", 1) + `,`, + `}`, + }, "") + return s +} +func (this *HTTPAuth) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&HTTPAuth{`, + `ClientCert:` + strings.Replace(strings.Replace(this.ClientCert.String(), "ClientCertAuth", "ClientCertAuth", 1), `&`, ``, 1) + `,`, + `OAuth2:` + strings.Replace(strings.Replace(this.OAuth2.String(), "OAuth2Auth", "OAuth2Auth", 1), `&`, ``, 1) + `,`, + `BasicAuth:` + strings.Replace(strings.Replace(this.BasicAuth.String(), 
"BasicAuth", "BasicAuth", 1), `&`, ``, 1) + `,`, + `}`, + }, "") + return s +} +func (this *HTTPBodySource) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&HTTPBodySource{`, + `Bytes:` + valueToStringGenerated(this.Bytes) + `,`, `}`, }, "") return s @@ -15215,6 +17809,16 @@ func (this *Link) String() string { }, "") return s } +func (this *ManifestFrom) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&ManifestFrom{`, + `Artifact:` + strings.Replace(this.Artifact.String(), "Artifact", "Artifact", 1) + `,`, + `}`, + }, "") + return s +} func (this *MemoizationStatus) String() string { if this == nil { return "nil" @@ -15413,6 +18017,36 @@ func (this *NoneStrategy) String() string { }, "") return s } +func (this *OAuth2Auth) String() string { + if this == nil { + return "nil" + } + repeatedStringForEndpointParams := "[]OAuth2EndpointParam{" + for _, f := range this.EndpointParams { + repeatedStringForEndpointParams += strings.Replace(strings.Replace(f.String(), "OAuth2EndpointParam", "OAuth2EndpointParam", 1), `&`, ``, 1) + "," + } + repeatedStringForEndpointParams += "}" + s := strings.Join([]string{`&OAuth2Auth{`, + `ClientIDSecret:` + strings.Replace(fmt.Sprintf("%v", this.ClientIDSecret), "SecretKeySelector", "v1.SecretKeySelector", 1) + `,`, + `ClientSecretSecret:` + strings.Replace(fmt.Sprintf("%v", this.ClientSecretSecret), "SecretKeySelector", "v1.SecretKeySelector", 1) + `,`, + `TokenURLSecret:` + strings.Replace(fmt.Sprintf("%v", this.TokenURLSecret), "SecretKeySelector", "v1.SecretKeySelector", 1) + `,`, + `Scopes:` + fmt.Sprintf("%v", this.Scopes) + `,`, + `EndpointParams:` + repeatedStringForEndpointParams + `,`, + `}`, + }, "") + return s +} +func (this *OAuth2EndpointParam) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&OAuth2EndpointParam{`, + `Key:` + fmt.Sprintf("%v", this.Key) + `,`, + `Value:` + fmt.Sprintf("%v", this.Value) + `,`, + 
`}`, + }, "") + return s +} func (this *OSSArtifact) String() string { if this == nil { return "nil" @@ -15589,6 +18223,7 @@ func (this *ResourceTemplate) String() string { `SuccessCondition:` + fmt.Sprintf("%v", this.SuccessCondition) + `,`, `FailureCondition:` + fmt.Sprintf("%v", this.FailureCondition) + `,`, `Flags:` + fmt.Sprintf("%v", this.Flags) + `,`, + `ManifestFrom:` + strings.Replace(this.ManifestFrom.String(), "ManifestFrom", "ManifestFrom", 1) + `,`, `}`, }, "") return s @@ -16021,6 +18656,34 @@ func (this *Workflow) String() string { }, "") return s } +func (this *WorkflowArtifactGCTask) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&WorkflowArtifactGCTask{`, + `ObjectMeta:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.ObjectMeta), "ObjectMeta", "v11.ObjectMeta", 1), `&`, ``, 1) + `,`, + `Spec:` + strings.Replace(strings.Replace(this.Spec.String(), "ArtifactGCSpec", "ArtifactGCSpec", 1), `&`, ``, 1) + `,`, + `Status:` + strings.Replace(strings.Replace(this.Status.String(), "ArtifactGCStatus", "ArtifactGCStatus", 1), `&`, ``, 1) + `,`, + `}`, + }, "") + return s +} +func (this *WorkflowArtifactGCTaskList) String() string { + if this == nil { + return "nil" + } + repeatedStringForItems := "[]WorkflowArtifactGCTask{" + for _, f := range this.Items { + repeatedStringForItems += strings.Replace(strings.Replace(f.String(), "WorkflowArtifactGCTask", "WorkflowArtifactGCTask", 1), `&`, ``, 1) + "," + } + repeatedStringForItems += "}" + s := strings.Join([]string{`&WorkflowArtifactGCTaskList{`, + `ListMeta:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.ListMeta), "ListMeta", "v11.ListMeta", 1), `&`, ``, 1) + `,`, + `Items:` + repeatedStringForItems + `,`, + `}`, + }, "") + return s +} func (this *WorkflowEventBinding) String() string { if this == nil { return "nil" @@ -16213,6 +18876,7 @@ func (this *WorkflowSpec) String() string { `ArchiveLogs:` + valueToStringGenerated(this.ArchiveLogs) + `,`, 
`Hooks:` + mapStringForHooks + `,`, `WorkflowMetadata:` + strings.Replace(this.WorkflowMetadata.String(), "WorkflowMetadata", "WorkflowMetadata", 1) + `,`, + `ArtifactGC:` + strings.Replace(this.ArtifactGC.String(), "ArtifactGC", "ArtifactGC", 1) + `,`, `}`, }, "") return s @@ -16279,6 +18943,7 @@ func (this *WorkflowStatus) String() string { `EstimatedDuration:` + fmt.Sprintf("%v", this.EstimatedDuration) + `,`, `Progress:` + fmt.Sprintf("%v", this.Progress) + `,`, `ArtifactRepositoryRef:` + strings.Replace(fmt.Sprintf("%v", this.ArtifactRepositoryRef), "ArtifactRepositoryRefStatus", "ArtifactRepositoryRefStatus", 1) + `,`, + `ArtifactGCStatus:` + strings.Replace(this.ArtifactGCStatus.String(), "ArtGCStatus", "ArtGCStatus", 1) + `,`, `}`, }, "") return s @@ -16827,7 +19492,7 @@ func (m *Arguments) Unmarshal(dAtA []byte) error { } return nil } -func (m *Artifact) Unmarshal(dAtA []byte) error { +func (m *ArtGCStatus) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -16850,131 +19515,15 @@ func (m *Artifact) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: Artifact: wiretype end group for non-group") + return fmt.Errorf("proto: ArtGCStatus: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: Artifact: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: ArtGCStatus: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenerated - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthGenerated - } - 
postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthGenerated - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Name = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Path", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenerated - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthGenerated - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthGenerated - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Path = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 3: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field Mode", wireType) - } - var v int32 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenerated - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - v |= int32(b&0x7F) << shift - if b < 0x80 { - break - } - } - m.Mode = &v - case 4: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field From", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenerated - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthGenerated - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthGenerated - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.From = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 5: - if wireType != 2 { - return 
fmt.Errorf("proto: wrong wireType = %d for field ArtifactLocation", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field StrategiesProcessed", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -17001,45 +19550,461 @@ func (m *Artifact) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if err := m.ArtifactLocation.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err + if m.StrategiesProcessed == nil { + m.StrategiesProcessed = make(map[ArtifactGCStrategy]bool) } - iNdEx = postIndex - case 6: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field GlobalName", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenerated - } - if iNdEx >= l { - return io.ErrUnexpectedEOF + var mapkey ArtifactGCStrategy + var mapvalue bool + for iNdEx < postIndex { + entryPreIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break + fieldNum := int32(wire >> 3) + if fieldNum == 1 { + var stringLenmapkey uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLenmapkey |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLenmapkey := int(stringLenmapkey) + if intStringLenmapkey < 0 { + return ErrInvalidLengthGenerated + } + postStringIndexmapkey := iNdEx + intStringLenmapkey + if postStringIndexmapkey < 0 { + return ErrInvalidLengthGenerated + } + if postStringIndexmapkey > l { + return io.ErrUnexpectedEOF + } + mapkey = ArtifactGCStrategy(dAtA[iNdEx:postStringIndexmapkey]) + 
iNdEx = postStringIndexmapkey + } else if fieldNum == 2 { + var mapvaluetemp int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + mapvaluetemp |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + mapvalue = bool(mapvaluetemp != 0) + } else { + iNdEx = entryPreIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > postIndex { + return io.ErrUnexpectedEOF + } + iNdEx += skippy } } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthGenerated - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthGenerated - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.GlobalName = string(dAtA[iNdEx:postIndex]) + m.StrategiesProcessed[ArtifactGCStrategy(mapkey)] = mapvalue iNdEx = postIndex - case 7: + case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Archive", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field PodsRecouped", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.PodsRecouped == nil { + m.PodsRecouped = make(map[string]bool) + } + var mapkey string + var mapvalue bool + for iNdEx < postIndex { + entryPreIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return 
io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + if fieldNum == 1 { + var stringLenmapkey uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLenmapkey |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLenmapkey := int(stringLenmapkey) + if intStringLenmapkey < 0 { + return ErrInvalidLengthGenerated + } + postStringIndexmapkey := iNdEx + intStringLenmapkey + if postStringIndexmapkey < 0 { + return ErrInvalidLengthGenerated + } + if postStringIndexmapkey > l { + return io.ErrUnexpectedEOF + } + mapkey = string(dAtA[iNdEx:postStringIndexmapkey]) + iNdEx = postStringIndexmapkey + } else if fieldNum == 2 { + var mapvaluetemp int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + mapvaluetemp |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + mapvalue = bool(mapvaluetemp != 0) + } else { + iNdEx = entryPreIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > postIndex { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + m.PodsRecouped[mapkey] = mapvalue + iNdEx = postIndex + case 3: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field NotSpecified", wireType) + } + var v int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.NotSpecified = bool(v != 0) + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + 
if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *Artifact) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Artifact: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: Artifact: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Name = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Path", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + 
} + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Path = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 3: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Mode", wireType) + } + var v int32 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int32(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.Mode = &v + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field From", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.From = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 5: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ArtifactLocation", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := 
m.ArtifactLocation.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 6: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field GlobalName", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.GlobalName = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 7: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Archive", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -17177,6 +20142,62 @@ func (m *Artifact) Unmarshal(dAtA []byte) error { } m.FromExpression = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex + case 12: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ArtifactGC", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.ArtifactGC == nil { + m.ArtifactGC = &ArtifactGC{} + } + if err := m.ArtifactGC.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 13: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Deleted", wireType) + } + var v int + for shift := 
uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.Deleted = bool(v != 0) default: iNdEx = preIndex skippy, err := skipGenerated(dAtA[iNdEx:]) @@ -17198,7 +20219,7 @@ func (m *Artifact) Unmarshal(dAtA []byte) error { } return nil } -func (m *ArtifactLocation) Unmarshal(dAtA []byte) error { +func (m *ArtifactGC) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -17221,17 +20242,17 @@ func (m *ArtifactLocation) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: ArtifactLocation: wiretype end group for non-group") + return fmt.Errorf("proto: ArtifactGC: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: ArtifactLocation: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: ArtifactGC: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field ArchiveLogs", wireType) + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Strategy", wireType) } - var v int + var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowGenerated @@ -17241,16 +20262,27 @@ func (m *ArtifactLocation) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - v |= int(b&0x7F) << shift + stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } - b := bool(v != 0) - m.ArchiveLogs = &b + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Strategy = ArtifactGCStrategy(dAtA[iNdEx:postIndex]) + iNdEx = postIndex 
case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field S3", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field PodMetadata", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -17277,18 +20309,18 @@ func (m *ArtifactLocation) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.S3 == nil { - m.S3 = &S3Artifact{} + if m.PodMetadata == nil { + m.PodMetadata = &Metadata{} } - if err := m.S3.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := m.PodMetadata.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 3: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Git", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field ServiceAccountName", wireType) } - var msglen int + var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowGenerated @@ -17298,33 +20330,2131 @@ func (m *ArtifactLocation) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - msglen |= int(b&0x7F) << shift + stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } - if msglen < 0 { + intStringLen := int(stringLen) + if intStringLen < 0 { return ErrInvalidLengthGenerated } - postIndex := iNdEx + msglen + postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthGenerated } if postIndex > l { return io.ErrUnexpectedEOF } - if m.Git == nil { - m.Git = &GitArtifact{} - } - if err := m.Git.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + m.ServiceAccountName = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { return err } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ArtifactGCSpec) 
Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ArtifactGCSpec: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ArtifactGCSpec: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ArtifactsByNode", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.ArtifactsByNode == nil { + m.ArtifactsByNode = make(map[string]ArtifactNodeSpec) + } + var mapkey string + mapvalue := &ArtifactNodeSpec{} + for iNdEx < postIndex { + entryPreIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + if fieldNum == 1 { + var stringLenmapkey uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLenmapkey |= uint64(b&0x7F) << shift + 
if b < 0x80 { + break + } + } + intStringLenmapkey := int(stringLenmapkey) + if intStringLenmapkey < 0 { + return ErrInvalidLengthGenerated + } + postStringIndexmapkey := iNdEx + intStringLenmapkey + if postStringIndexmapkey < 0 { + return ErrInvalidLengthGenerated + } + if postStringIndexmapkey > l { + return io.ErrUnexpectedEOF + } + mapkey = string(dAtA[iNdEx:postStringIndexmapkey]) + iNdEx = postStringIndexmapkey + } else if fieldNum == 2 { + var mapmsglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + mapmsglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if mapmsglen < 0 { + return ErrInvalidLengthGenerated + } + postmsgIndex := iNdEx + mapmsglen + if postmsgIndex < 0 { + return ErrInvalidLengthGenerated + } + if postmsgIndex > l { + return io.ErrUnexpectedEOF + } + mapvalue = &ArtifactNodeSpec{} + if err := mapvalue.Unmarshal(dAtA[iNdEx:postmsgIndex]); err != nil { + return err + } + iNdEx = postmsgIndex + } else { + iNdEx = entryPreIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > postIndex { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + m.ArtifactsByNode[mapkey] = *mapvalue + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ArtifactGCStatus) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 
ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ArtifactGCStatus: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ArtifactGCStatus: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ArtifactResultsByNode", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.ArtifactResultsByNode == nil { + m.ArtifactResultsByNode = make(map[string]ArtifactResultNodeStatus) + } + var mapkey string + mapvalue := &ArtifactResultNodeStatus{} + for iNdEx < postIndex { + entryPreIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + if fieldNum == 1 { + var stringLenmapkey uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLenmapkey |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLenmapkey := int(stringLenmapkey) + if intStringLenmapkey < 0 { + return ErrInvalidLengthGenerated + } + 
postStringIndexmapkey := iNdEx + intStringLenmapkey + if postStringIndexmapkey < 0 { + return ErrInvalidLengthGenerated + } + if postStringIndexmapkey > l { + return io.ErrUnexpectedEOF + } + mapkey = string(dAtA[iNdEx:postStringIndexmapkey]) + iNdEx = postStringIndexmapkey + } else if fieldNum == 2 { + var mapmsglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + mapmsglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if mapmsglen < 0 { + return ErrInvalidLengthGenerated + } + postmsgIndex := iNdEx + mapmsglen + if postmsgIndex < 0 { + return ErrInvalidLengthGenerated + } + if postmsgIndex > l { + return io.ErrUnexpectedEOF + } + mapvalue = &ArtifactResultNodeStatus{} + if err := mapvalue.Unmarshal(dAtA[iNdEx:postmsgIndex]); err != nil { + return err + } + iNdEx = postmsgIndex + } else { + iNdEx = entryPreIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > postIndex { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + m.ArtifactResultsByNode[mapkey] = *mapvalue + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ArtifactLocation) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 
0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ArtifactLocation: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ArtifactLocation: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field ArchiveLogs", wireType) + } + var v int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + b := bool(v != 0) + m.ArchiveLogs = &b + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field S3", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.S3 == nil { + m.S3 = &S3Artifact{} + } + if err := m.S3.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Git", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return 
io.ErrUnexpectedEOF + } + if m.Git == nil { + m.Git = &GitArtifact{} + } + if err := m.Git.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field HTTP", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.HTTP == nil { + m.HTTP = &HTTPArtifact{} + } + if err := m.HTTP.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 5: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Artifactory", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Artifactory == nil { + m.Artifactory = &ArtifactoryArtifact{} + } + if err := m.Artifactory.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 6: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field HDFS", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 
0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.HDFS == nil { + m.HDFS = &HDFSArtifact{} + } + if err := m.HDFS.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 7: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Raw", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Raw == nil { + m.Raw = &RawArtifact{} + } + if err := m.Raw.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 8: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field OSS", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.OSS == nil { + m.OSS = &OSSArtifact{} + } + if err := m.OSS.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 9: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field GCS", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { 
+ return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.GCS == nil { + m.GCS = &GCSArtifact{} + } + if err := m.GCS.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 10: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Azure", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Azure == nil { + m.Azure = &AzureArtifact{} + } + if err := m.Azure.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ArtifactNodeSpec) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + 
fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ArtifactNodeSpec: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ArtifactNodeSpec: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ArchiveLocation", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.ArchiveLocation == nil { + m.ArchiveLocation = &ArtifactLocation{} + } + if err := m.ArchiveLocation.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Artifacts", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Artifacts == nil { + m.Artifacts = make(map[string]Artifact) + } + var mapkey string + mapvalue := &Artifact{} + for iNdEx < postIndex { + entryPreIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] 
+ iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + if fieldNum == 1 { + var stringLenmapkey uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLenmapkey |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLenmapkey := int(stringLenmapkey) + if intStringLenmapkey < 0 { + return ErrInvalidLengthGenerated + } + postStringIndexmapkey := iNdEx + intStringLenmapkey + if postStringIndexmapkey < 0 { + return ErrInvalidLengthGenerated + } + if postStringIndexmapkey > l { + return io.ErrUnexpectedEOF + } + mapkey = string(dAtA[iNdEx:postStringIndexmapkey]) + iNdEx = postStringIndexmapkey + } else if fieldNum == 2 { + var mapmsglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + mapmsglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if mapmsglen < 0 { + return ErrInvalidLengthGenerated + } + postmsgIndex := iNdEx + mapmsglen + if postmsgIndex < 0 { + return ErrInvalidLengthGenerated + } + if postmsgIndex > l { + return io.ErrUnexpectedEOF + } + mapvalue = &Artifact{} + if err := mapvalue.Unmarshal(dAtA[iNdEx:postmsgIndex]); err != nil { + return err + } + iNdEx = postmsgIndex + } else { + iNdEx = entryPreIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > postIndex { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + m.Artifacts[mapkey] = *mapvalue + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + 
skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ArtifactPaths) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ArtifactPaths: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ArtifactPaths: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Artifact", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.Artifact.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ArtifactRepository) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + 
for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ArtifactRepository: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ArtifactRepository: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field ArchiveLogs", wireType) + } + var v int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + b := bool(v != 0) + m.ArchiveLogs = &b + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field S3", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.S3 == nil { + m.S3 = &S3ArtifactRepository{} + } + if err := m.S3.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Artifactory", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= 
int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Artifactory == nil { + m.Artifactory = &ArtifactoryArtifactRepository{} + } + if err := m.Artifactory.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field HDFS", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.HDFS == nil { + m.HDFS = &HDFSArtifactRepository{} + } + if err := m.HDFS.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 5: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field OSS", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.OSS == nil { + m.OSS = &OSSArtifactRepository{} + } + if err := m.OSS.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 6: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field 
GCS", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.GCS == nil { + m.GCS = &GCSArtifactRepository{} + } + if err := m.GCS.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 7: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Azure", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Azure == nil { + m.Azure = &AzureArtifactRepository{} + } + if err := m.Azure.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ArtifactRepositoryRef) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return 
io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ArtifactRepositoryRef: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ArtifactRepositoryRef: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ConfigMap", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ConfigMap = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Key", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Key = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return 
ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ArtifactRepositoryRefStatus) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ArtifactRepositoryRefStatus: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ArtifactRepositoryRefStatus: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ArtifactRepositoryRef", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.ArtifactRepositoryRef.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Namespace", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } 
+ intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Namespace = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 3: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Default", wireType) + } + var v int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.Default = bool(v != 0) + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ArtifactRepository", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.ArtifactRepository == nil { + m.ArtifactRepository = &ArtifactRepository{} + } + if err := m.ArtifactRepository.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ArtifactResult) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; 
shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ArtifactResult: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ArtifactResult: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Name = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Success", wireType) + } + var v int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.Success = bool(v != 0) + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Error", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := 
int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + s := string(dAtA[iNdEx:postIndex]) + m.Error = &s + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ArtifactResultNodeStatus) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ArtifactResultNodeStatus: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ArtifactResultNodeStatus: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ArtifactResults", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.ArtifactResults == nil { + m.ArtifactResults = 
make(map[string]ArtifactResult) + } + var mapkey string + mapvalue := &ArtifactResult{} + for iNdEx < postIndex { + entryPreIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + if fieldNum == 1 { + var stringLenmapkey uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLenmapkey |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLenmapkey := int(stringLenmapkey) + if intStringLenmapkey < 0 { + return ErrInvalidLengthGenerated + } + postStringIndexmapkey := iNdEx + intStringLenmapkey + if postStringIndexmapkey < 0 { + return ErrInvalidLengthGenerated + } + if postStringIndexmapkey > l { + return io.ErrUnexpectedEOF + } + mapkey = string(dAtA[iNdEx:postStringIndexmapkey]) + iNdEx = postStringIndexmapkey + } else if fieldNum == 2 { + var mapmsglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + mapmsglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if mapmsglen < 0 { + return ErrInvalidLengthGenerated + } + postmsgIndex := iNdEx + mapmsglen + if postmsgIndex < 0 { + return ErrInvalidLengthGenerated + } + if postmsgIndex > l { + return io.ErrUnexpectedEOF + } + mapvalue = &ArtifactResult{} + if err := mapvalue.Unmarshal(dAtA[iNdEx:postmsgIndex]); err != nil { + return err + } + iNdEx = postmsgIndex + } else { + iNdEx = entryPreIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > 
postIndex { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + m.ArtifactResults[mapkey] = *mapvalue + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ArtifactSearchQuery) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ArtifactSearchQuery: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ArtifactSearchQuery: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ArtifactGCStrategies", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.ArtifactGCStrategies == nil { + m.ArtifactGCStrategies = make(map[ArtifactGCStrategy]bool) + } + var mapkey ArtifactGCStrategy + var mapvalue bool + for iNdEx < postIndex { + entryPreIndex := iNdEx + var wire uint64 + for shift := 
uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + if fieldNum == 1 { + var stringLenmapkey uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLenmapkey |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLenmapkey := int(stringLenmapkey) + if intStringLenmapkey < 0 { + return ErrInvalidLengthGenerated + } + postStringIndexmapkey := iNdEx + intStringLenmapkey + if postStringIndexmapkey < 0 { + return ErrInvalidLengthGenerated + } + if postStringIndexmapkey > l { + return io.ErrUnexpectedEOF + } + mapkey = ArtifactGCStrategy(dAtA[iNdEx:postStringIndexmapkey]) + iNdEx = postStringIndexmapkey + } else if fieldNum == 2 { + var mapvaluetemp int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + mapvaluetemp |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + mapvalue = bool(mapvaluetemp != 0) + } else { + iNdEx = entryPreIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > postIndex { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + m.ArtifactGCStrategies[ArtifactGCStrategy(mapkey)] = mapvalue iNdEx = postIndex - case 4: + case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field HTTP", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field ArtifactName", wireType) } - var msglen int + var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowGenerated @@ 
-17334,33 +22464,29 @@ func (m *ArtifactLocation) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - msglen |= int(b&0x7F) << shift + stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } - if msglen < 0 { + intStringLen := int(stringLen) + if intStringLen < 0 { return ErrInvalidLengthGenerated } - postIndex := iNdEx + msglen + postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthGenerated } if postIndex > l { return io.ErrUnexpectedEOF } - if m.HTTP == nil { - m.HTTP = &HTTPArtifact{} - } - if err := m.HTTP.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } + m.ArtifactName = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 5: + case 3: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Artifactory", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field TemplateName", wireType) } - var msglen int + var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowGenerated @@ -17370,33 +22496,29 @@ func (m *ArtifactLocation) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - msglen |= int(b&0x7F) << shift + stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } - if msglen < 0 { + intStringLen := int(stringLen) + if intStringLen < 0 { return ErrInvalidLengthGenerated } - postIndex := iNdEx + msglen + postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthGenerated } if postIndex > l { return io.ErrUnexpectedEOF } - if m.Artifactory == nil { - m.Artifactory = &ArtifactoryArtifact{} - } - if err := m.Artifactory.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } + m.TemplateName = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 6: + case 4: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field HDFS", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field NodeId", wireType) } - var msglen int + var stringLen uint64 for shift := uint(0); ; shift += 7 { 
if shift >= 64 { return ErrIntOverflowGenerated @@ -17406,33 +22528,29 @@ func (m *ArtifactLocation) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - msglen |= int(b&0x7F) << shift + stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } - if msglen < 0 { + intStringLen := int(stringLen) + if intStringLen < 0 { return ErrInvalidLengthGenerated } - postIndex := iNdEx + msglen + postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthGenerated } if postIndex > l { return io.ErrUnexpectedEOF } - if m.HDFS == nil { - m.HDFS = &HDFSArtifact{} - } - if err := m.HDFS.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } + m.NodeId = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 7: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Raw", wireType) + case 5: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Deleted", wireType) } - var msglen int + var v int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowGenerated @@ -17442,31 +22560,16 @@ func (m *ArtifactLocation) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - msglen |= int(b&0x7F) << shift + v |= int(b&0x7F) << shift if b < 0x80 { break } } - if msglen < 0 { - return ErrInvalidLengthGenerated - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthGenerated - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.Raw == nil { - m.Raw = &RawArtifact{} - } - if err := m.Raw.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - case 8: + b := bool(v != 0) + m.Deleted = &b + case 6: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field OSS", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field NodeTypes", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -17493,48 +22596,91 @@ func (m *ArtifactLocation) Unmarshal(dAtA []byte) error { if postIndex > l { 
return io.ErrUnexpectedEOF } - if m.OSS == nil { - m.OSS = &OSSArtifact{} - } - if err := m.OSS.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - case 9: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field GCS", wireType) + if m.NodeTypes == nil { + m.NodeTypes = make(map[NodeType]bool) } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenerated - } - if iNdEx >= l { - return io.ErrUnexpectedEOF + var mapkey NodeType + var mapvalue bool + for iNdEx < postIndex { + entryPreIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break + fieldNum := int32(wire >> 3) + if fieldNum == 1 { + var stringLenmapkey uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLenmapkey |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLenmapkey := int(stringLenmapkey) + if intStringLenmapkey < 0 { + return ErrInvalidLengthGenerated + } + postStringIndexmapkey := iNdEx + intStringLenmapkey + if postStringIndexmapkey < 0 { + return ErrInvalidLengthGenerated + } + if postStringIndexmapkey > l { + return io.ErrUnexpectedEOF + } + mapkey = NodeType(dAtA[iNdEx:postStringIndexmapkey]) + iNdEx = postStringIndexmapkey + } else if fieldNum == 2 { + var mapvaluetemp int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + mapvaluetemp |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + mapvalue = 
bool(mapvaluetemp != 0) + } else { + iNdEx = entryPreIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > postIndex { + return io.ErrUnexpectedEOF + } + iNdEx += skippy } } - if msglen < 0 { - return ErrInvalidLengthGenerated - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthGenerated - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.GCS == nil { - m.GCS = &GCSArtifact{} - } - if err := m.GCS.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } + m.NodeTypes[NodeType(mapkey)] = mapvalue iNdEx = postIndex default: iNdEx = preIndex @@ -17557,7 +22703,7 @@ func (m *ArtifactLocation) Unmarshal(dAtA []byte) error { } return nil } -func (m *ArtifactPaths) Unmarshal(dAtA []byte) error { +func (m *ArtifactSearchResult) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -17580,10 +22726,10 @@ func (m *ArtifactPaths) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: ArtifactPaths: wiretype end group for non-group") + return fmt.Errorf("proto: ArtifactSearchResult: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: ArtifactPaths: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: ArtifactSearchResult: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: @@ -17619,6 +22765,38 @@ func (m *ArtifactPaths) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field NodeID", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << 
shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.NodeID = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipGenerated(dAtA[iNdEx:]) @@ -17640,7 +22818,7 @@ func (m *ArtifactPaths) Unmarshal(dAtA []byte) error { } return nil } -func (m *ArtifactRepository) Unmarshal(dAtA []byte) error { +func (m *ArtifactoryArtifact) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -17663,38 +22841,17 @@ func (m *ArtifactRepository) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: ArtifactRepository: wiretype end group for non-group") + return fmt.Errorf("proto: ArtifactoryArtifact: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: ArtifactRepository: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: ArtifactoryArtifact: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field ArchiveLogs", wireType) - } - var v int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenerated - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - v |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - b := bool(v != 0) - m.ArchiveLogs = &b - case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field S3", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field URL", wireType) } - var msglen int + var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowGenerated @@ -17704,31 +22861,27 @@ func (m *ArtifactRepository) 
Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - msglen |= int(b&0x7F) << shift + stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } - if msglen < 0 { + intStringLen := int(stringLen) + if intStringLen < 0 { return ErrInvalidLengthGenerated } - postIndex := iNdEx + msglen + postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthGenerated } if postIndex > l { return io.ErrUnexpectedEOF } - if m.S3 == nil { - m.S3 = &S3ArtifactRepository{} - } - if err := m.S3.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } + m.URL = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 3: + case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Artifactory", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field ArtifactoryAuth", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -17755,52 +22908,63 @@ func (m *ArtifactRepository) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Artifactory == nil { - m.Artifactory = &ArtifactoryArtifactRepository{} - } - if err := m.Artifactory.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := m.ArtifactoryAuth.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex - case 4: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field HDFS", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenerated - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthGenerated + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err } - postIndex := iNdEx + msglen - if postIndex < 0 { + if (skippy < 0) || (iNdEx+skippy) < 0 { return ErrInvalidLengthGenerated } - if postIndex > l { + if (iNdEx + skippy) > l { return 
io.ErrUnexpectedEOF } - if m.HDFS == nil { - m.HDFS = &HDFSArtifactRepository{} + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ArtifactoryArtifactRepository) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated } - if err := m.HDFS.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err + if iNdEx >= l { + return io.ErrUnexpectedEOF } - iNdEx = postIndex - case 5: + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ArtifactoryArtifactRepository: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ArtifactoryArtifactRepository: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field OSS", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field ArtifactoryAuth", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -17827,18 +22991,15 @@ func (m *ArtifactRepository) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.OSS == nil { - m.OSS = &OSSArtifactRepository{} - } - if err := m.OSS.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := m.ArtifactoryAuth.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex - case 6: + case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field GCS", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field RepoURL", wireType) } - var msglen int + var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowGenerated @@ -17848,27 +23009,23 @@ func (m *ArtifactRepository) 
Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - msglen |= int(b&0x7F) << shift + stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } - if msglen < 0 { + intStringLen := int(stringLen) + if intStringLen < 0 { return ErrInvalidLengthGenerated } - postIndex := iNdEx + msglen + postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthGenerated } if postIndex > l { return io.ErrUnexpectedEOF } - if m.GCS == nil { - m.GCS = &GCSArtifactRepository{} - } - if err := m.GCS.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } + m.RepoURL = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex @@ -17891,7 +23048,7 @@ func (m *ArtifactRepository) Unmarshal(dAtA []byte) error { } return nil } -func (m *ArtifactRepositoryRef) Unmarshal(dAtA []byte) error { +func (m *ArtifactoryAuth) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -17914,17 +23071,17 @@ func (m *ArtifactRepositoryRef) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: ArtifactRepositoryRef: wiretype end group for non-group") + return fmt.Errorf("proto: ArtifactoryAuth: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: ArtifactRepositoryRef: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: ArtifactoryAuth: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ConfigMap", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field UsernameSecret", wireType) } - var stringLen uint64 + var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowGenerated @@ -17934,29 +23091,33 @@ func (m *ArtifactRepositoryRef) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - stringLen |= uint64(b&0x7F) << shift + msglen |= int(b&0x7F) << shift if b 
< 0x80 { break } } - intStringLen := int(stringLen) - if intStringLen < 0 { + if msglen < 0 { return ErrInvalidLengthGenerated } - postIndex := iNdEx + intStringLen + postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthGenerated } if postIndex > l { return io.ErrUnexpectedEOF } - m.ConfigMap = string(dAtA[iNdEx:postIndex]) + if m.UsernameSecret == nil { + m.UsernameSecret = &v1.SecretKeySelector{} + } + if err := m.UsernameSecret.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } iNdEx = postIndex case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Key", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field PasswordSecret", wireType) } - var stringLen uint64 + var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowGenerated @@ -17966,23 +23127,27 @@ func (m *ArtifactRepositoryRef) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - stringLen |= uint64(b&0x7F) << shift + msglen |= int(b&0x7F) << shift if b < 0x80 { break } } - intStringLen := int(stringLen) - if intStringLen < 0 { + if msglen < 0 { return ErrInvalidLengthGenerated } - postIndex := iNdEx + intStringLen + postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthGenerated } if postIndex > l { return io.ErrUnexpectedEOF } - m.Key = string(dAtA[iNdEx:postIndex]) + if m.PasswordSecret == nil { + m.PasswordSecret = &v1.SecretKeySelector{} + } + if err := m.PasswordSecret.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } iNdEx = postIndex default: iNdEx = preIndex @@ -18005,7 +23170,7 @@ func (m *ArtifactRepositoryRef) Unmarshal(dAtA []byte) error { } return nil } -func (m *ArtifactRepositoryRefStatus) Unmarshal(dAtA []byte) error { +func (m *AzureArtifact) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -18028,15 +23193,15 @@ func (m *ArtifactRepositoryRefStatus) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := 
int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: ArtifactRepositoryRefStatus: wiretype end group for non-group") + return fmt.Errorf("proto: AzureArtifact: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: ArtifactRepositoryRefStatus: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: AzureArtifact: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ArtifactRepositoryRef", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field AzureBlobContainer", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -18063,13 +23228,13 @@ func (m *ArtifactRepositoryRefStatus) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if err := m.ArtifactRepositoryRef.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := m.AzureBlobContainer.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Namespace", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Blob", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { @@ -18097,31 +23262,61 @@ func (m *ArtifactRepositoryRefStatus) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Namespace = string(dAtA[iNdEx:postIndex]) + m.Blob = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 3: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field Default", wireType) + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err } - var v int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenerated - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - v |= int(b&0x7F) << shift - if b < 0x80 { - break - } + if 
(skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated } - m.Default = bool(v != 0) - case 4: + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *AzureArtifactRepository) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: AzureArtifactRepository: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: AzureArtifactRepository: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ArtifactRepository", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field AzureBlobContainer", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -18148,13 +23343,42 @@ func (m *ArtifactRepositoryRefStatus) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.ArtifactRepository == nil { - m.ArtifactRepository = &ArtifactRepository{} - } - if err := m.ArtifactRepository.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := m.AzureBlobContainer.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field BlobNameFormat", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= 
uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.BlobNameFormat = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipGenerated(dAtA[iNdEx:]) @@ -18176,7 +23400,7 @@ func (m *ArtifactRepositoryRefStatus) Unmarshal(dAtA []byte) error { } return nil } -func (m *ArtifactoryArtifact) Unmarshal(dAtA []byte) error { +func (m *AzureBlobContainer) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -18199,15 +23423,15 @@ func (m *ArtifactoryArtifact) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: ArtifactoryArtifact: wiretype end group for non-group") + return fmt.Errorf("proto: AzureBlobContainer: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: ArtifactoryArtifact: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: AzureBlobContainer: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field URL", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Endpoint", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { @@ -18235,11 +23459,43 @@ func (m *ArtifactoryArtifact) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.URL = string(dAtA[iNdEx:postIndex]) + m.Endpoint = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ArtifactoryAuth", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Container", wireType) + } + var stringLen uint64 + for shift := 
uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Container = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field AccountKeySecret", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -18266,10 +23522,33 @@ func (m *ArtifactoryArtifact) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if err := m.ArtifactoryAuth.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if m.AccountKeySecret == nil { + m.AccountKeySecret = &v1.SecretKeySelector{} + } + if err := m.AccountKeySecret.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex + case 4: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field UseSDKCreds", wireType) + } + var v int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.UseSDKCreds = bool(v != 0) default: iNdEx = preIndex skippy, err := skipGenerated(dAtA[iNdEx:]) @@ -18291,7 +23570,7 @@ func (m *ArtifactoryArtifact) Unmarshal(dAtA []byte) error { } return nil } -func (m *ArtifactoryArtifactRepository) Unmarshal(dAtA []byte) error { +func (m *Backoff) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -18314,15 +23593,47 @@ func (m *ArtifactoryArtifactRepository) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 
0x7) if wireType == 4 { - return fmt.Errorf("proto: ArtifactoryArtifactRepository: wiretype end group for non-group") + return fmt.Errorf("proto: Backoff: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: ArtifactoryArtifactRepository: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: Backoff: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ArtifactoryAuth", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Duration", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Duration = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Factor", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -18349,13 +23660,16 @@ func (m *ArtifactoryArtifactRepository) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if err := m.ArtifactoryAuth.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if m.Factor == nil { + m.Factor = &intstr.IntOrString{} + } + if err := m.Factor.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex - case 2: + case 3: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field RepoURL", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field MaxDuration", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 
7 { @@ -18383,7 +23697,7 @@ func (m *ArtifactoryArtifactRepository) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.RepoURL = string(dAtA[iNdEx:postIndex]) + m.MaxDuration = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex @@ -18406,7 +23720,7 @@ func (m *ArtifactoryArtifactRepository) Unmarshal(dAtA []byte) error { } return nil } -func (m *ArtifactoryAuth) Unmarshal(dAtA []byte) error { +func (m *BasicAuth) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -18429,10 +23743,10 @@ func (m *ArtifactoryAuth) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: ArtifactoryAuth: wiretype end group for non-group") + return fmt.Errorf("proto: BasicAuth: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: ArtifactoryAuth: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: BasicAuth: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: @@ -18528,7 +23842,7 @@ func (m *ArtifactoryAuth) Unmarshal(dAtA []byte) error { } return nil } -func (m *Backoff) Unmarshal(dAtA []byte) error { +func (m *Cache) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -18551,47 +23865,15 @@ func (m *Backoff) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: Backoff: wiretype end group for non-group") + return fmt.Errorf("proto: Cache: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: Backoff: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: Cache: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Duration", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - 
if shift >= 64 { - return ErrIntOverflowGenerated - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthGenerated - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthGenerated - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Duration = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Factor", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field ConfigMap", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -18618,45 +23900,13 @@ func (m *Backoff) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Factor == nil { - m.Factor = &intstr.IntOrString{} + if m.ConfigMap == nil { + m.ConfigMap = &v1.ConfigMapKeySelector{} } - if err := m.Factor.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := m.ConfigMap.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field MaxDuration", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenerated - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthGenerated - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthGenerated - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.MaxDuration = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipGenerated(dAtA[iNdEx:]) @@ -18678,7 +23928,7 @@ func (m *Backoff) 
Unmarshal(dAtA []byte) error { } return nil } -func (m *Cache) Unmarshal(dAtA []byte) error { +func (m *ClientCertAuth) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -18701,15 +23951,15 @@ func (m *Cache) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: Cache: wiretype end group for non-group") + return fmt.Errorf("proto: ClientCertAuth: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: Cache: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: ClientCertAuth: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ConfigMap", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field ClientCertSecret", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -18736,10 +23986,46 @@ func (m *Cache) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.ConfigMap == nil { - m.ConfigMap = &v1.ConfigMapKeySelector{} + if m.ClientCertSecret == nil { + m.ClientCertSecret = &v1.SecretKeySelector{} } - if err := m.ConfigMap.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := m.ClientCertSecret.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ClientKeySecret", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if 
m.ClientKeySecret == nil { + m.ClientKeySecret = &v1.SecretKeySelector{} + } + if err := m.ClientKeySecret.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex @@ -22349,6 +27635,58 @@ func (m *GitArtifact) Unmarshal(dAtA []byte) error { } } m.DisableSubmodules = bool(v != 0) + case 10: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field SingleBranch", wireType) + } + var v int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.SingleBranch = bool(v != 0) + case 11: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Branch", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Branch = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipGenerated(dAtA[iNdEx:]) @@ -23064,17 +28402,457 @@ func (m *HTTP) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: HTTP: wiretype end group for non-group") + return fmt.Errorf("proto: HTTP: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: HTTP: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Method", wireType) + } + var 
stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Method = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field URL", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.URL = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Headers", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Headers = append(m.Headers, HTTPHeader{}) + if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + 
case 4: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field TimeoutSeconds", wireType) + } + var v int64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.TimeoutSeconds = &v + case 5: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Body", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Body = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 6: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field SuccessCondition", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.SuccessCondition = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 7: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field InsecureSkipVerify", wireType) + } + var v int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 
ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.InsecureSkipVerify = bool(v != 0) + case 8: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field BodyFrom", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.BodyFrom == nil { + m.BodyFrom = &HTTPBodySource{} + } + if err := m.BodyFrom.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *HTTPArtifact) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: HTTPArtifact: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: HTTPArtifact: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 
2 { + return fmt.Errorf("proto: wrong wireType = %d for field URL", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.URL = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Headers", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Headers = append(m.Headers, Header{}) + if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Auth", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Auth == nil { + m.Auth = 
&HTTPAuth{} + } + if err := m.Auth.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *HTTPAuth) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: HTTPAuth: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: HTTP: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: HTTPAuth: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Method", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field ClientCert", wireType) } - var stringLen uint64 + var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowGenerated @@ -23084,59 +28862,28 @@ func (m *HTTP) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - stringLen |= uint64(b&0x7F) << shift + msglen |= int(b&0x7F) << shift if b < 0x80 { break } } - intStringLen := int(stringLen) - if intStringLen < 0 { + if msglen < 0 { return ErrInvalidLengthGenerated } - postIndex := iNdEx + intStringLen + postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthGenerated } if postIndex > l { return io.ErrUnexpectedEOF } - 
m.Method = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field URL", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenerated - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthGenerated - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthGenerated - } - if postIndex > l { - return io.ErrUnexpectedEOF + if err := m.ClientCert.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err } - m.URL = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 3: + case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Headers", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field OAuth2", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -23163,36 +28910,15 @@ func (m *HTTP) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, HTTPHeader{}) - if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := m.OAuth2.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex - case 4: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field TimeoutSeconds", wireType) - } - var v int64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenerated - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - v |= int64(b&0x7F) << shift - if b < 0x80 { - break - } - } - m.TimeoutSeconds = &v - case 5: + case 3: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Body", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field 
BasicAuth", wireType) } - var stringLen uint64 + var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowGenerated @@ -23202,76 +28928,25 @@ func (m *HTTP) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - stringLen |= uint64(b&0x7F) << shift + msglen |= int(b&0x7F) << shift if b < 0x80 { break } } - intStringLen := int(stringLen) - if intStringLen < 0 { + if msglen < 0 { return ErrInvalidLengthGenerated } - postIndex := iNdEx + intStringLen + postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthGenerated } if postIndex > l { return io.ErrUnexpectedEOF } - m.Body = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 6: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field SuccessCondition", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenerated - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthGenerated - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthGenerated - } - if postIndex > l { - return io.ErrUnexpectedEOF + if err := m.BasicAuth.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err } - m.SuccessCondition = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 7: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field InsecureSkipVerify", wireType) - } - var v int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenerated - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - v |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - m.InsecureSkipVerify = bool(v != 0) default: iNdEx = preIndex skippy, err := skipGenerated(dAtA[iNdEx:]) @@ -23293,7 +28968,7 @@ func (m *HTTP) 
Unmarshal(dAtA []byte) error { } return nil } -func (m *HTTPArtifact) Unmarshal(dAtA []byte) error { +func (m *HTTPBodySource) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -23316,49 +28991,17 @@ func (m *HTTPArtifact) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: HTTPArtifact: wiretype end group for non-group") + return fmt.Errorf("proto: HTTPBodySource: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: HTTPArtifact: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: HTTPBodySource: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field URL", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenerated - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthGenerated - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthGenerated - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.URL = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Headers", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Bytes", wireType) } - var msglen int + var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowGenerated @@ -23368,24 +29011,24 @@ func (m *HTTPArtifact) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - msglen |= int(b&0x7F) << shift + byteLen |= int(b&0x7F) << shift if b < 0x80 { break } } - if msglen < 0 { + if byteLen < 0 { return ErrInvalidLengthGenerated } - 
postIndex := iNdEx + msglen + postIndex := iNdEx + byteLen if postIndex < 0 { return ErrInvalidLengthGenerated } if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, Header{}) - if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err + m.Bytes = append(m.Bytes[:0], dAtA[iNdEx:postIndex]...) + if m.Bytes == nil { + m.Bytes = []byte{} } iNdEx = postIndex default: @@ -24652,6 +30295,92 @@ func (m *Link) Unmarshal(dAtA []byte) error { } return nil } +func (m *ManifestFrom) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ManifestFrom: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ManifestFrom: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Artifact", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Artifact == nil { + m.Artifact = &Artifact{} + } + if err := m.Artifact.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := 
skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func (m *MemoizationStatus) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 @@ -26716,18 +32445,323 @@ func (m *NodeStatus) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.MemoizationStatus == nil { - m.MemoizationStatus = &MemoizationStatus{} + if m.MemoizationStatus == nil { + m.MemoizationStatus = &MemoizationStatus{} + } + if err := m.MemoizationStatus.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 24: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field EstimatedDuration", wireType) + } + m.EstimatedDuration = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.EstimatedDuration |= EstimatedDuration(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 25: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field SynchronizationStatus", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.SynchronizationStatus == nil { + m.SynchronizationStatus = &NodeSynchronizationStatus{} + } + if err := m.SynchronizationStatus.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx 
= postIndex + case 26: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Progress", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Progress = Progress(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *NodeSynchronizationStatus) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: NodeSynchronizationStatus: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: NodeSynchronizationStatus: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Waiting", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 
ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Waiting = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *NoneStrategy) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: NoneStrategy: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: NoneStrategy: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *OAuth2Auth) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 
+ for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: OAuth2Auth: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: OAuth2Auth: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ClientIDSecret", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.ClientIDSecret == nil { + m.ClientIDSecret = &v1.SecretKeySelector{} } - if err := m.MemoizationStatus.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := m.ClientIDSecret.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex - case 24: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field EstimatedDuration", wireType) + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ClientSecretSecret", wireType) } - m.EstimatedDuration = 0 + var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowGenerated @@ -26737,14 +32771,31 @@ func (m *NodeStatus) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - m.EstimatedDuration |= EstimatedDuration(b&0x7F) << shift + msglen |= int(b&0x7F) << shift if b < 0x80 { break } } - case 25: + if msglen < 0 { + 
return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.ClientSecretSecret == nil { + m.ClientSecretSecret = &v1.SecretKeySelector{} + } + if err := m.ClientSecretSecret.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 3: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field SynchronizationStatus", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field TokenURLSecret", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -26771,16 +32822,16 @@ func (m *NodeStatus) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.SynchronizationStatus == nil { - m.SynchronizationStatus = &NodeSynchronizationStatus{} + if m.TokenURLSecret == nil { + m.TokenURLSecret = &v1.SecretKeySelector{} } - if err := m.SynchronizationStatus.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := m.TokenURLSecret.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex - case 26: + case 5: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Progress", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Scopes", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { @@ -26808,7 +32859,41 @@ func (m *NodeStatus) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Progress = Progress(dAtA[iNdEx:postIndex]) + m.Scopes = append(m.Scopes, string(dAtA[iNdEx:postIndex])) + iNdEx = postIndex + case 6: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field EndpointParams", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 
0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.EndpointParams = append(m.EndpointParams, OAuth2EndpointParam{}) + if err := m.EndpointParams[len(m.EndpointParams)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } iNdEx = postIndex default: iNdEx = preIndex @@ -26831,7 +32916,7 @@ func (m *NodeStatus) Unmarshal(dAtA []byte) error { } return nil } -func (m *NodeSynchronizationStatus) Unmarshal(dAtA []byte) error { +func (m *OAuth2EndpointParam) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -26854,15 +32939,15 @@ func (m *NodeSynchronizationStatus) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: NodeSynchronizationStatus: wiretype end group for non-group") + return fmt.Errorf("proto: OAuth2EndpointParam: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: NodeSynchronizationStatus: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: OAuth2EndpointParam: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Waiting", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Key", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { @@ -26890,58 +32975,40 @@ func (m *NodeSynchronizationStatus) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Waiting = string(dAtA[iNdEx:postIndex]) + m.Key = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipGenerated(dAtA[iNdEx:]) - if err != nil { - return err + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType) } 
- if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthGenerated + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated } - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *NoneStrategy) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenerated + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated } - if iNdEx >= l { + if postIndex > l { return io.ErrUnexpectedEOF } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: NoneStrategy: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: NoneStrategy: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { + m.Value = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipGenerated(dAtA[iNdEx:]) @@ -28966,6 +35033,42 @@ func (m *ResourceTemplate) Unmarshal(dAtA []byte) error { } m.Flags = append(m.Flags, string(dAtA[iNdEx:postIndex])) iNdEx = postIndex + case 8: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ManifestFrom", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= 
int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.ManifestFrom == nil { + m.ManifestFrom = &ManifestFrom{} + } + if err := m.ManifestFrom.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipGenerated(dAtA[iNdEx:]) @@ -34292,17 +40395,99 @@ func (m *VolumeClaimGC) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: VolumeClaimGC: wiretype end group for non-group") + return fmt.Errorf("proto: VolumeClaimGC: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: VolumeClaimGC: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Strategy", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Strategy = VolumeClaimGCStrategy(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + 
return nil +} +func (m *Workflow) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Workflow: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: VolumeClaimGC: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: Workflow: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Strategy", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field ObjectMeta", wireType) } - var stringLen uint64 + var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowGenerated @@ -34312,23 +40497,90 @@ func (m *VolumeClaimGC) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - stringLen |= uint64(b&0x7F) << shift + msglen |= int(b&0x7F) << shift if b < 0x80 { break } } - intStringLen := int(stringLen) - if intStringLen < 0 { + if msglen < 0 { return ErrInvalidLengthGenerated } - postIndex := iNdEx + intStringLen + postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthGenerated } if postIndex > l { return io.ErrUnexpectedEOF } - m.Strategy = VolumeClaimGCStrategy(dAtA[iNdEx:postIndex]) + if err := m.ObjectMeta.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Spec", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF 
+ } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.Spec.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Status", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.Status.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } iNdEx = postIndex default: iNdEx = preIndex @@ -34351,7 +40603,7 @@ func (m *VolumeClaimGC) Unmarshal(dAtA []byte) error { } return nil } -func (m *Workflow) Unmarshal(dAtA []byte) error { +func (m *WorkflowArtifactGCTask) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -34374,10 +40626,10 @@ func (m *Workflow) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: Workflow: wiretype end group for non-group") + return fmt.Errorf("proto: WorkflowArtifactGCTask: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: Workflow: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: WorkflowArtifactGCTask: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: @@ -34500,6 +40752,123 @@ func (m *Workflow) Unmarshal(dAtA []byte) error { } return nil } +func (m 
*WorkflowArtifactGCTaskList) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: WorkflowArtifactGCTaskList: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: WorkflowArtifactGCTaskList: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ListMeta", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.ListMeta.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Items", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Items = append(m.Items, 
WorkflowArtifactGCTask{}) + if err := m.Items[len(m.Items)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenerated(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenerated + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func (m *WorkflowEventBinding) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 @@ -36921,6 +43290,42 @@ func (m *WorkflowSpec) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 43: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ArtifactGC", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.ArtifactGC == nil { + m.ArtifactGC = &ArtifactGC{} + } + if err := m.ArtifactGC.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipGenerated(dAtA[iNdEx:]) @@ -37799,6 +44204,42 @@ func (m *WorkflowStatus) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 19: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ArtifactGCStatus", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + 
if msglen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.ArtifactGCStatus == nil { + m.ArtifactGCStatus = &ArtGCStatus{} + } + if err := m.ArtifactGCStatus.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipGenerated(dAtA[iNdEx:]) diff --git a/pkg/apis/workflow/v1alpha1/generated.proto b/pkg/apis/workflow/v1alpha1/generated.proto index 5cdaaa958416..d4025f8d9dd2 100644 --- a/pkg/apis/workflow/v1alpha1/generated.proto +++ b/pkg/apis/workflow/v1alpha1/generated.proto @@ -43,6 +43,19 @@ message Arguments { repeated Artifact artifacts = 2; } +// ArtGCStatus maintains state related to ArtifactGC +message ArtGCStatus { + // have Pods been started to perform this strategy? (enables us not to re-process what we've already done) + map strategiesProcessed = 1; + + // have completed Pods been processed? (mapped by Pod name) + // used to prevent re-processing the Status of a Pod more than once + map podsRecouped = 2; + + // if this is true, we already checked to see if we need to do it and we don't + optional bool notSpecified = 3; +} + // Artifact indicates an artifact to place at a specified path message Artifact { // name of the artifact. must be unique within a template's inputs/outputs. @@ -79,6 +92,37 @@ message Artifact { // FromExpression, if defined, is evaluated to specify the value for the artifact optional string fromExpression = 11; + + // ArtifactGC describes the strategy to use when to deleting an artifact from completed or deleted workflows + optional ArtifactGC artifactGC = 12; + + // Has this been deleted? + optional bool deleted = 13; +} + +// ArtifactGC describes how to delete artifacts from completed Workflows +message ArtifactGC { + // Strategy is the strategy to use. 
+ // +kubebuilder:validation:Enum="";OnWorkflowCompletion;OnWorkflowDeletion;Never + optional string strategy = 1; + + // PodMetadata is an optional field for specifying the Labels and Annotations that should be assigned to the Pod doing the deletion + optional Metadata podMetadata = 2; + + // ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion + optional string serviceAccountName = 3; +} + +// ArtifactGCSpec specifies the Artifacts that need to be deleted +message ArtifactGCSpec { + // ArtifactsByNode maps Node name to information pertaining to Artifacts on that Node + map artifactsByNode = 1; +} + +// ArtifactGCStatus describes the result of the deletion +message ArtifactGCStatus { + // ArtifactResultsByNode maps Node name to result + map artifactResultsByNode = 1; } // ArtifactLocation describes a location for a single or multiple artifacts. @@ -112,6 +156,18 @@ message ArtifactLocation { // GCS contains GCS artifact location details optional GCSArtifact gcs = 9; + + // Azure contains Azure Storage artifact location details + optional AzureArtifact azure = 10; +} + +// ArtifactNodeSpec specifies the Artifacts that need to be deleted for a given Node +message ArtifactNodeSpec { + // ArchiveLocation is the template-level Artifact location specification + optional ArtifactLocation archiveLocation = 1; + + // Artifacts maps artifact name to Artifact description + map artifacts = 2; } // ArtifactPaths expands a step from a collection of artifacts @@ -139,6 +195,9 @@ message ArtifactRepository { // GCS stores artifact in a GCS object store optional GCSArtifactRepository gcs = 6; + + // Azure stores artifact in an Azure Storage account + optional AzureArtifactRepository azure = 7; } // +protobuf.options.(gogoproto.goproto_stringer)=false @@ -164,6 +223,44 @@ message ArtifactRepositoryRefStatus { optional ArtifactRepository artifactRepository = 4; } +// ArtifactResult describes the result of 
attempting to delete a given Artifact +message ArtifactResult { + // Name is the name of the Artifact + optional string name = 1; + + // Success describes whether the deletion succeeded + optional bool success = 2; + + // Error is an optional error message which should be set if Success==false + optional string error = 3; +} + +// ArtifactResultNodeStatus describes the result of the deletion on a given node +message ArtifactResultNodeStatus { + // ArtifactResults maps Artifact name to result of the deletion + map artifactResults = 1; +} + +message ArtifactSearchQuery { + map artifactGCStrategies = 1; + + optional string artifactName = 2; + + optional string templateName = 3; + + optional string nodeId = 4; + + optional bool deleted = 5; + + map nodeTypes = 6; +} + +message ArtifactSearchResult { + optional Artifact artifact = 1; + + optional string nodeID = 2; +} + // ArtifactoryArtifact is the location of an artifactory artifact message ArtifactoryArtifact { // URL of the artifact @@ -189,6 +286,37 @@ message ArtifactoryAuth { optional k8s.io.api.core.v1.SecretKeySelector passwordSecret = 2; } +// AzureArtifact is the location of a an Azure Storage artifact +message AzureArtifact { + optional AzureBlobContainer azureBlobContainer = 1; + + // Blob is the blob name (i.e., path) in the container where the artifact resides + optional string blob = 2; +} + +// AzureArtifactRepository defines the controller configuration for an Azure Blob Storage artifact repository +message AzureArtifactRepository { + optional AzureBlobContainer blobContainer = 1; + + // BlobNameFormat is defines the format of how to store blob names. Can reference workflow variables + optional string blobNameFormat = 2; +} + +// AzureBlobContainer contains the access information for interfacing with an Azure Blob Storage container +message AzureBlobContainer { + // Endpoint is the service url associated with an account. 
It is most likely "https://.blob.core.windows.net" + optional string endpoint = 1; + + // Container is the container where resources will be stored + optional string container = 2; + + // AccountKeySecret is the secret selector to the Azure Blob Storage account access key + optional k8s.io.api.core.v1.SecretKeySelector accountKeySecret = 3; + + // UseSDKCreds tells the driver to figure out credentials based on sdk defaults. + optional bool useSDKCreds = 4; +} + // Backoff is a backoff strategy to use within retryStrategy message Backoff { // Duration is the amount to back off. Default unit is seconds, but could also be a duration (e.g. "2m", "1h") @@ -201,12 +329,28 @@ message Backoff { optional string maxDuration = 3; } +// BasicAuth describes the secret selectors required for basic authentication +message BasicAuth { + // UsernameSecret is the secret selector to the repository username + optional k8s.io.api.core.v1.SecretKeySelector usernameSecret = 1; + + // PasswordSecret is the secret selector to the repository password + optional k8s.io.api.core.v1.SecretKeySelector passwordSecret = 2; +} + // Cache is the configuration for the type of cache to be used message Cache { // ConfigMap sets a ConfigMap-based cache optional k8s.io.api.core.v1.ConfigMapKeySelector configMap = 1; } +// ClientCertAuth holds necessary information for client authentication via certificates +message ClientCertAuth { + optional k8s.io.api.core.v1.SecretKeySelector clientCertSecret = 1; + + optional k8s.io.api.core.v1.SecretKeySelector clientKeySecret = 2; +} + // ClusterWorkflowTemplate is the definition of a workflow template resource in cluster scope // +genclient // +genclient:noStatus @@ -508,6 +652,12 @@ message GitArtifact { // DisableSubmodules disables submodules during git clone optional bool disableSubmodules = 9; + + // SingleBranch enables single branch clone, using the `branch` parameter + optional bool singleBranch = 10; + + // Branch is the branch to fetch when 
`SingleBranch` is enabled + optional string branch = 11; } // HDFSArtifact is the location of an HDFS artifact @@ -590,17 +740,36 @@ message HTTP { // Body is content of the HTTP Request optional string body = 5; - // insecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client + // BodyFrom is content of the HTTP Request as Bytes + optional HTTPBodySource bodyFrom = 8; + + // InsecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client optional bool insecureSkipVerify = 7; } -// HTTPArtifact allows an file served on HTTP to be placed as an input artifact in a container +// HTTPArtifact allows a file served on HTTP to be placed as an input artifact in a container message HTTPArtifact { // URL of the artifact optional string url = 1; // Headers are an optional list of headers to send with HTTP requests for artifacts repeated Header headers = 2; + + // Auth contains information for client authentication + optional HTTPAuth auth = 3; +} + +message HTTPAuth { + optional ClientCertAuth clientCert = 1; + + optional OAuth2Auth oauth2 = 2; + + optional BasicAuth basicAuth = 3; +} + +// HTTPBodySource contains the source of the HTTP body. 
+message HTTPBodySource { + optional bytes bytes = 1; } message HTTPHeader { @@ -698,6 +867,11 @@ message Link { optional string url = 3; } +message ManifestFrom { + // Artifact contains the artifact to use + optional Artifact artifact = 1; +} + // MemoizationStatus is the status of this memoized node message MemoizationStatus { // Hit indicates whether this node was created from a cache entry @@ -888,6 +1062,28 @@ message NodeSynchronizationStatus { message NoneStrategy { } +// OAuth2Auth holds all information for client authentication via OAuth2 tokens +message OAuth2Auth { + optional k8s.io.api.core.v1.SecretKeySelector clientIDSecret = 1; + + optional k8s.io.api.core.v1.SecretKeySelector clientSecretSecret = 2; + + optional k8s.io.api.core.v1.SecretKeySelector tokenURLSecret = 3; + + repeated string scopes = 5; + + repeated OAuth2EndpointParam endpointParams = 6; +} + +// EndpointParam is for requesting optional fields that should be sent in the oauth request +message OAuth2EndpointParam { + // Name is the header name + optional string key = 1; + + // Value is the literal value to use for the header + optional string value = 2; +} + // OSSArtifact is the location of an Alibaba Cloud OSS artifact message OSSArtifact { optional OSSBucket oSSBucket = 1; @@ -1048,6 +1244,9 @@ message ResourceTemplate { // Manifest contains the kubernetes manifest optional string manifest = 3; + // ManifestFrom is the source for a single kubernetes manifest + optional ManifestFrom manifestFrom = 8; + // SetOwnerReference sets the reference to the workflow on the OwnerReference of generated resource. optional bool setOwnerReference = 4; @@ -1079,7 +1278,8 @@ message RetryNodeAntiAffinity { // RetryStrategy provides controls on how to retry a workflow step message RetryStrategy { - // Limit is the maximum number of attempts when retrying a container + // Limit is the maximum number of retry attempts when retrying a container. 
It does not include the original + // container; the maximum number of total attempts will be `limit + 1`. optional k8s.io.apimachinery.pkg.util.intstr.IntOrString limit = 1; // RetryPolicy is a policy of NodePhase statuses that will be retried @@ -1545,6 +1745,7 @@ message VolumeClaimGC { // +kubebuilder:resource:shortName=wf // +kubebuilder:printcolumn:name="Status",type="string",JSONPath=".status.phase",description="Status of the workflow" // +kubebuilder:printcolumn:name="Age",type="date",format="date-time",JSONPath=".status.startedAt",description="When the workflow was started" +// +kubebuilder:printcolumn:name="Message",type="string",JSONPath=".status.message",description="Human readable message indicating details about why the workflow is in this condition." // +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object message Workflow { optional k8s.io.apimachinery.pkg.apis.meta.v1.ObjectMeta metadata = 1; @@ -1554,6 +1755,27 @@ message Workflow { optional WorkflowStatus status = 3; } +// WorkflowArtifactGCTask specifies the Artifacts that need to be deleted as well as the status of deletion +// +genclient +// +kubebuilder:resource:shortName=wfat +// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object +// +kubebuilder:subresource:status +message WorkflowArtifactGCTask { + optional k8s.io.apimachinery.pkg.apis.meta.v1.ObjectMeta metadata = 1; + + optional ArtifactGCSpec spec = 2; + + optional ArtifactGCStatus status = 3; +} + +// WorkflowArtifactGCTaskList is list of WorkflowArtifactGCTask resources +// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object +message WorkflowArtifactGCTaskList { + optional k8s.io.apimachinery.pkg.apis.meta.v1.ListMeta metadata = 1; + + repeated WorkflowArtifactGCTask items = 2; +} + // WorkflowEventBinding is the definition of an event resource // +genclient // +genclient:noStatus @@ -1706,13 +1928,14 @@ message WorkflowSpec { // +optional optional string schedulerName = 21; - // PodGC 
describes the strategy to use when to deleting completed pods + // PodGC describes the strategy to use when deleting completed pods optional PodGC podGC = 22; // PriorityClassName to apply to workflow pods. optional string podPriorityClassName = 23; // Priority to apply to workflow pods. + // DEPRECATED: Use PodPriorityClassName instead. optional int32 podPriority = 24; // +patchStrategy=merge @@ -1746,7 +1969,7 @@ message WorkflowSpec { // Synchronization holds synchronization lock configuration for this Workflow optional Synchronization synchronization = 35; - // VolumeClaimGC describes the strategy to use when to deleting volumes from completed workflows + // VolumeClaimGC describes the strategy to use when deleting volumes from completed workflows optional VolumeClaimGC volumeClaimGC = 36; // RetryStrategy for all templates in the workflow. @@ -1765,8 +1988,12 @@ message WorkflowSpec { // step, irrespective of the success, failure, or error status of the primary step map hooks = 41; - // WorkflowMetadata contains some metadata of the workflow to be refer + // WorkflowMetadata contains some metadata of the workflow to refer to optional WorkflowMetadata workflowMetadata = 42; + + // ArtifactGC describes the strategy to use when deleting artifacts from completed or deleted workflows (applies to all output Artifacts + // unless Artifact.ArtifactGC is specified, which overrides this) + optional ArtifactGC artifactGC = 43; } // WorkflowStatus contains overall status information about a workflow @@ -1823,6 +2050,9 @@ message WorkflowStatus { // ArtifactRepositoryRef is used to cache the repository to use so we do not need to determine it everytime we reconcile. 
optional ArtifactRepositoryRefStatus artifactRepositoryRef = 18; + + // ArtifactGCStatus maintains the status of Artifact Garbage Collection + optional ArtGCStatus artifactGCStatus = 19; } // WorkflowStep is a reference to a template to execute in a series of step diff --git a/pkg/apis/workflow/v1alpha1/http_types.go b/pkg/apis/workflow/v1alpha1/http_types.go index 5468e459899f..79f9c3e29ec9 100644 --- a/pkg/apis/workflow/v1alpha1/http_types.go +++ b/pkg/apis/workflow/v1alpha1/http_types.go @@ -12,6 +12,11 @@ type HTTPHeaderSource struct { type HTTPHeaders []HTTPHeader +// HTTPBodySource contains the source of the HTTP body. +type HTTPBodySource struct { + Bytes []byte `json:"bytes,omitempty" protobuf:"bytes,1,opt,name=bytes"` +} + func (h HTTPHeaders) ToHeader() http.Header { outHeader := make(http.Header) for _, header := range h { @@ -43,6 +48,15 @@ type HTTP struct { SuccessCondition string `json:"successCondition,omitempty" protobuf:"bytes,6,opt,name=successCondition"` // Body is content of the HTTP Request Body string `json:"body,omitempty" protobuf:"bytes,5,opt,name=body"` - // insecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client + // BodyFrom is content of the HTTP Request as Bytes + BodyFrom *HTTPBodySource `json:"bodyFrom,omitempty" protobuf:"bytes,8,opt,name=bodyFrom"` + // InsecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client InsecureSkipVerify bool `json:"insecureSkipVerify,omitempty" protobuf:"bytes,7,opt,name=insecureSkipVerify"` } + +func (h *HTTP) GetBodyBytes() []byte { + if h.BodyFrom != nil { + return h.BodyFrom.Bytes + } + return nil +} diff --git a/pkg/apis/workflow/v1alpha1/marshall.go b/pkg/apis/workflow/v1alpha1/marshall.go index b2449bfd2d2a..f851806943f9 100644 --- a/pkg/apis/workflow/v1alpha1/marshall.go +++ b/pkg/apis/workflow/v1alpha1/marshall.go @@ -78,3 +78,9 @@ func MustUnmarshalWorkflowTemplate(text interface{}) *WorkflowTemplate { 
MustUnmarshal(text, &x) return x } + +func MustUnmarshalWorkflowArtifactGCTask(text interface{}) *WorkflowArtifactGCTask { + x := &WorkflowArtifactGCTask{} + MustUnmarshal(text, &x) + return x +} diff --git a/pkg/apis/workflow/v1alpha1/openapi_generated.go b/pkg/apis/workflow/v1alpha1/openapi_generated.go index 5cfee1384c5f..66ad4cd339a2 100644 --- a/pkg/apis/workflow/v1alpha1/openapi_generated.go +++ b/pkg/apis/workflow/v1alpha1/openapi_generated.go @@ -17,17 +17,31 @@ func GetOpenAPIDefinitions(ref common.ReferenceCallback) map[string]common.OpenA "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Amount": schema_pkg_apis_workflow_v1alpha1_Amount(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArchiveStrategy": schema_pkg_apis_workflow_v1alpha1_ArchiveStrategy(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Arguments": schema_pkg_apis_workflow_v1alpha1_Arguments(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtGCStatus": schema_pkg_apis_workflow_v1alpha1_ArtGCStatus(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Artifact": schema_pkg_apis_workflow_v1alpha1_Artifact(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactGC": schema_pkg_apis_workflow_v1alpha1_ArtifactGC(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactGCSpec": schema_pkg_apis_workflow_v1alpha1_ArtifactGCSpec(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactGCStatus": schema_pkg_apis_workflow_v1alpha1_ArtifactGCStatus(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactLocation": schema_pkg_apis_workflow_v1alpha1_ArtifactLocation(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactNodeSpec": schema_pkg_apis_workflow_v1alpha1_ArtifactNodeSpec(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactPaths": 
schema_pkg_apis_workflow_v1alpha1_ArtifactPaths(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactRepository": schema_pkg_apis_workflow_v1alpha1_ArtifactRepository(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactRepositoryRef": schema_pkg_apis_workflow_v1alpha1_ArtifactRepositoryRef(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactRepositoryRefStatus": schema_pkg_apis_workflow_v1alpha1_ArtifactRepositoryRefStatus(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactResult": schema_pkg_apis_workflow_v1alpha1_ArtifactResult(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactResultNodeStatus": schema_pkg_apis_workflow_v1alpha1_ArtifactResultNodeStatus(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactSearchQuery": schema_pkg_apis_workflow_v1alpha1_ArtifactSearchQuery(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactSearchResult": schema_pkg_apis_workflow_v1alpha1_ArtifactSearchResult(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactoryArtifact": schema_pkg_apis_workflow_v1alpha1_ArtifactoryArtifact(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactoryArtifactRepository": schema_pkg_apis_workflow_v1alpha1_ArtifactoryArtifactRepository(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactoryAuth": schema_pkg_apis_workflow_v1alpha1_ArtifactoryAuth(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.AzureArtifact": schema_pkg_apis_workflow_v1alpha1_AzureArtifact(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.AzureArtifactRepository": schema_pkg_apis_workflow_v1alpha1_AzureArtifactRepository(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.AzureBlobContainer": 
schema_pkg_apis_workflow_v1alpha1_AzureBlobContainer(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Backoff": schema_pkg_apis_workflow_v1alpha1_Backoff(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.BasicAuth": schema_pkg_apis_workflow_v1alpha1_BasicAuth(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Cache": schema_pkg_apis_workflow_v1alpha1_Cache(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ClientCertAuth": schema_pkg_apis_workflow_v1alpha1_ClientCertAuth(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ClusterWorkflowTemplate": schema_pkg_apis_workflow_v1alpha1_ClusterWorkflowTemplate(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ClusterWorkflowTemplateList": schema_pkg_apis_workflow_v1alpha1_ClusterWorkflowTemplateList(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Condition": schema_pkg_apis_workflow_v1alpha1_Condition(ref), @@ -58,6 +72,8 @@ func GetOpenAPIDefinitions(ref common.ReferenceCallback) map[string]common.OpenA "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HDFSKrbConfig": schema_pkg_apis_workflow_v1alpha1_HDFSKrbConfig(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTP": schema_pkg_apis_workflow_v1alpha1_HTTP(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTPArtifact": schema_pkg_apis_workflow_v1alpha1_HTTPArtifact(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTPAuth": schema_pkg_apis_workflow_v1alpha1_HTTPAuth(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTPBodySource": schema_pkg_apis_workflow_v1alpha1_HTTPBodySource(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTPHeader": schema_pkg_apis_workflow_v1alpha1_HTTPHeader(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTPHeaderSource": 
schema_pkg_apis_workflow_v1alpha1_HTTPHeaderSource(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Header": schema_pkg_apis_workflow_v1alpha1_Header(ref), @@ -69,6 +85,7 @@ func GetOpenAPIDefinitions(ref common.ReferenceCallback) map[string]common.OpenA "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.LabelValues": schema_pkg_apis_workflow_v1alpha1_LabelValues(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.LifecycleHook": schema_pkg_apis_workflow_v1alpha1_LifecycleHook(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Link": schema_pkg_apis_workflow_v1alpha1_Link(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ManifestFrom": schema_pkg_apis_workflow_v1alpha1_ManifestFrom(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.MemoizationStatus": schema_pkg_apis_workflow_v1alpha1_MemoizationStatus(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Memoize": schema_pkg_apis_workflow_v1alpha1_Memoize(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Metadata": schema_pkg_apis_workflow_v1alpha1_Metadata(ref), @@ -81,6 +98,8 @@ func GetOpenAPIDefinitions(ref common.ReferenceCallback) map[string]common.OpenA "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.NodeStatus": schema_pkg_apis_workflow_v1alpha1_NodeStatus(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.NodeSynchronizationStatus": schema_pkg_apis_workflow_v1alpha1_NodeSynchronizationStatus(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.NoneStrategy": schema_pkg_apis_workflow_v1alpha1_NoneStrategy(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.OAuth2Auth": schema_pkg_apis_workflow_v1alpha1_OAuth2Auth(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.OAuth2EndpointParam": schema_pkg_apis_workflow_v1alpha1_OAuth2EndpointParam(ref), 
"github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.OSSArtifact": schema_pkg_apis_workflow_v1alpha1_OSSArtifact(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.OSSArtifactRepository": schema_pkg_apis_workflow_v1alpha1_OSSArtifactRepository(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.OSSBucket": schema_pkg_apis_workflow_v1alpha1_OSSBucket(ref), @@ -122,6 +141,8 @@ func GetOpenAPIDefinitions(ref common.ReferenceCallback) map[string]common.OpenA "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Version": schema_pkg_apis_workflow_v1alpha1_Version(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.VolumeClaimGC": schema_pkg_apis_workflow_v1alpha1_VolumeClaimGC(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Workflow": schema_pkg_apis_workflow_v1alpha1_Workflow(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.WorkflowArtifactGCTask": schema_pkg_apis_workflow_v1alpha1_WorkflowArtifactGCTask(ref), + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.WorkflowArtifactGCTaskList": schema_pkg_apis_workflow_v1alpha1_WorkflowArtifactGCTaskList(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.WorkflowEventBinding": schema_pkg_apis_workflow_v1alpha1_WorkflowEventBinding(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.WorkflowEventBindingList": schema_pkg_apis_workflow_v1alpha1_WorkflowEventBindingList(ref), "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.WorkflowEventBindingSpec": schema_pkg_apis_workflow_v1alpha1_WorkflowEventBindingSpec(ref), @@ -240,6 +261,58 @@ func schema_pkg_apis_workflow_v1alpha1_Arguments(ref common.ReferenceCallback) c } } +func schema_pkg_apis_workflow_v1alpha1_ArtGCStatus(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + 
Description: "ArtGCStatus maintains state related to ArtifactGC", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "strategiesProcessed": { + SchemaProps: spec.SchemaProps{ + Description: "have Pods been started to perform this strategy? (enables us not to re-process what we've already done)", + Type: []string{"object"}, + AdditionalProperties: &spec.SchemaOrBool{ + Allows: true, + Schema: &spec.Schema{ + SchemaProps: spec.SchemaProps{ + Default: false, + Type: []string{"boolean"}, + Format: "", + }, + }, + }, + }, + }, + "podsRecouped": { + SchemaProps: spec.SchemaProps{ + Description: "have completed Pods been processed? (mapped by Pod name) used to prevent re-processing the Status of a Pod more than once", + Type: []string{"object"}, + AdditionalProperties: &spec.SchemaOrBool{ + Allows: true, + Schema: &spec.Schema{ + SchemaProps: spec.SchemaProps{ + Default: false, + Type: []string{"boolean"}, + Format: "", + }, + }, + }, + }, + }, + "notSpecified": { + SchemaProps: spec.SchemaProps{ + Description: "if this is true, we already checked to see if we need to do it and we don't", + Type: []string{"boolean"}, + Format: "", + }, + }, + }, + }, + }, + } +} + func schema_pkg_apis_workflow_v1alpha1_Artifact(ref common.ReferenceCallback) common.OpenAPIDefinition { return common.OpenAPIDefinition{ Schema: spec.Schema{ @@ -331,6 +404,12 @@ func schema_pkg_apis_workflow_v1alpha1_Artifact(ref common.ReferenceCallback) co Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GCSArtifact"), }, }, + "azure": { + SchemaProps: spec.SchemaProps{ + Description: "Azure contains Azure Storage artifact location details", + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.AzureArtifact"), + }, + }, "globalName": { SchemaProps: spec.SchemaProps{ Description: "GlobalName exports an output artifact to the global scope, making it available as '{{workflow.outputs.artifacts.XXXX}} and in workflow.status.outputs.artifacts", @@ 
-372,12 +451,120 @@ func schema_pkg_apis_workflow_v1alpha1_Artifact(ref common.ReferenceCallback) co Format: "", }, }, + "artifactGC": { + SchemaProps: spec.SchemaProps{ + Description: "ArtifactGC describes the strategy to use when to deleting an artifact from completed or deleted workflows", + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactGC"), + }, + }, + "deleted": { + SchemaProps: spec.SchemaProps{ + Description: "Has this been deleted?", + Type: []string{"boolean"}, + Format: "", + }, + }, }, Required: []string{"name"}, }, }, Dependencies: []string{ - "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArchiveStrategy", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactoryArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GCSArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GitArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HDFSArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTPArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.OSSArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.RawArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.S3Artifact"}, + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArchiveStrategy", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactGC", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactoryArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.AzureArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GCSArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GitArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HDFSArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTPArtifact", 
"github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.OSSArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.RawArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.S3Artifact"}, + } +} + +func schema_pkg_apis_workflow_v1alpha1_ArtifactGC(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "ArtifactGC describes how to delete artifacts from completed Workflows", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "strategy": { + SchemaProps: spec.SchemaProps{ + Description: "Strategy is the strategy to use.", + Type: []string{"string"}, + Format: "", + }, + }, + "podMetadata": { + SchemaProps: spec.SchemaProps{ + Description: "PodMetadata is an optional field for specifying the Labels and Annotations that should be assigned to the Pod doing the deletion", + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Metadata"), + }, + }, + "serviceAccountName": { + SchemaProps: spec.SchemaProps{ + Description: "ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion", + Type: []string{"string"}, + Format: "", + }, + }, + }, + }, + }, + Dependencies: []string{ + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Metadata"}, + } +} + +func schema_pkg_apis_workflow_v1alpha1_ArtifactGCSpec(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "ArtifactGCSpec specifies the Artifacts that need to be deleted", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "artifactsByNode": { + SchemaProps: spec.SchemaProps{ + Description: "ArtifactsByNode maps Node name to information pertaining to Artifacts on that Node", + Type: []string{"object"}, + AdditionalProperties: 
&spec.SchemaOrBool{ + Allows: true, + Schema: &spec.Schema{ + SchemaProps: spec.SchemaProps{ + Default: map[string]interface{}{}, + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactNodeSpec"), + }, + }, + }, + }, + }, + }, + }, + }, + Dependencies: []string{ + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactNodeSpec"}, + } +} + +func schema_pkg_apis_workflow_v1alpha1_ArtifactGCStatus(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "ArtifactGCStatus describes the result of the deletion", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "artifactResultsByNode": { + SchemaProps: spec.SchemaProps{ + Description: "ArtifactResultsByNode maps Node name to result", + Type: []string{"object"}, + AdditionalProperties: &spec.SchemaOrBool{ + Allows: true, + Schema: &spec.Schema{ + SchemaProps: spec.SchemaProps{ + Default: map[string]interface{}{}, + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactResultNodeStatus"), + }, + }, + }, + }, + }, + }, + }, + }, + Dependencies: []string{ + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactResultNodeStatus"}, } } @@ -443,11 +630,53 @@ func schema_pkg_apis_workflow_v1alpha1_ArtifactLocation(ref common.ReferenceCall Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GCSArtifact"), }, }, + "azure": { + SchemaProps: spec.SchemaProps{ + Description: "Azure contains Azure Storage artifact location details", + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.AzureArtifact"), + }, + }, + }, + }, + }, + Dependencies: []string{ + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactoryArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.AzureArtifact", 
"github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GCSArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GitArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HDFSArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTPArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.OSSArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.RawArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.S3Artifact"}, + } +} + +func schema_pkg_apis_workflow_v1alpha1_ArtifactNodeSpec(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "ArtifactNodeSpec specifies the Artifacts that need to be deleted for a given Node", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "archiveLocation": { + SchemaProps: spec.SchemaProps{ + Description: "ArchiveLocation is the template-level Artifact location specification", + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactLocation"), + }, + }, + "artifacts": { + SchemaProps: spec.SchemaProps{ + Description: "Artifacts maps artifact name to Artifact description", + Type: []string{"object"}, + AdditionalProperties: &spec.SchemaOrBool{ + Allows: true, + Schema: &spec.Schema{ + SchemaProps: spec.SchemaProps{ + Default: map[string]interface{}{}, + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Artifact"), + }, + }, + }, + }, + }, }, }, }, Dependencies: []string{ - "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactoryArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GCSArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GitArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HDFSArtifact", 
"github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTPArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.OSSArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.RawArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.S3Artifact"}, + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Artifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactLocation"}, } } @@ -542,6 +771,12 @@ func schema_pkg_apis_workflow_v1alpha1_ArtifactPaths(ref common.ReferenceCallbac Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GCSArtifact"), }, }, + "azure": { + SchemaProps: spec.SchemaProps{ + Description: "Azure contains Azure Storage artifact location details", + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.AzureArtifact"), + }, + }, "globalName": { SchemaProps: spec.SchemaProps{ Description: "GlobalName exports an output artifact to the global scope, making it available as '{{workflow.outputs.artifacts.XXXX}} and in workflow.status.outputs.artifacts", @@ -583,12 +818,25 @@ func schema_pkg_apis_workflow_v1alpha1_ArtifactPaths(ref common.ReferenceCallbac Format: "", }, }, + "artifactGC": { + SchemaProps: spec.SchemaProps{ + Description: "ArtifactGC describes the strategy to use when to deleting an artifact from completed or deleted workflows", + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactGC"), + }, + }, + "deleted": { + SchemaProps: spec.SchemaProps{ + Description: "Has this been deleted?", + Type: []string{"boolean"}, + Format: "", + }, + }, }, Required: []string{"name"}, }, }, Dependencies: []string{ - "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArchiveStrategy", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactoryArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GCSArtifact", 
"github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GitArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HDFSArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTPArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.OSSArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.RawArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.S3Artifact"}, + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArchiveStrategy", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactGC", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactoryArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.AzureArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GCSArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GitArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HDFSArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTPArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.OSSArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.RawArtifact", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.S3Artifact"}, } } @@ -636,11 +884,17 @@ func schema_pkg_apis_workflow_v1alpha1_ArtifactRepository(ref common.ReferenceCa Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GCSArtifactRepository"), }, }, + "azure": { + SchemaProps: spec.SchemaProps{ + Description: "Azure stores artifact in an Azure Storage account", + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.AzureArtifactRepository"), + }, + }, }, }, }, Dependencies: []string{ - "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactoryArtifactRepository", 
"github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GCSArtifactRepository", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HDFSArtifactRepository", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.OSSArtifactRepository", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.S3ArtifactRepository"}, + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactoryArtifactRepository", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.AzureArtifactRepository", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.GCSArtifactRepository", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HDFSArtifactRepository", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.OSSArtifactRepository", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.S3ArtifactRepository"}, } } @@ -718,6 +972,166 @@ func schema_pkg_apis_workflow_v1alpha1_ArtifactRepositoryRefStatus(ref common.Re } } +func schema_pkg_apis_workflow_v1alpha1_ArtifactResult(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "ArtifactResult describes the result of attempting to delete a given Artifact", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "name": { + SchemaProps: spec.SchemaProps{ + Description: "Name is the name of the Artifact", + Default: "", + Type: []string{"string"}, + Format: "", + }, + }, + "success": { + SchemaProps: spec.SchemaProps{ + Description: "Success describes whether the deletion succeeded", + Type: []string{"boolean"}, + Format: "", + }, + }, + "error": { + SchemaProps: spec.SchemaProps{ + Description: "Error is an optional error message which should be set if Success==false", + Type: []string{"string"}, + Format: "", + }, + }, + }, + Required: []string{"name"}, + }, + }, + } +} + +func 
schema_pkg_apis_workflow_v1alpha1_ArtifactResultNodeStatus(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "ArtifactResultNodeStatus describes the result of the deletion on a given node", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "artifactResults": { + SchemaProps: spec.SchemaProps{ + Description: "ArtifactResults maps Artifact name to result of the deletion", + Type: []string{"object"}, + AdditionalProperties: &spec.SchemaOrBool{ + Allows: true, + Schema: &spec.Schema{ + SchemaProps: spec.SchemaProps{ + Default: map[string]interface{}{}, + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactResult"), + }, + }, + }, + }, + }, + }, + }, + }, + Dependencies: []string{ + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactResult"}, + } +} + +func schema_pkg_apis_workflow_v1alpha1_ArtifactSearchQuery(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "artifactGCStrategies": { + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + AdditionalProperties: &spec.SchemaOrBool{ + Allows: true, + Schema: &spec.Schema{ + SchemaProps: spec.SchemaProps{ + Default: false, + Type: []string{"boolean"}, + Format: "", + }, + }, + }, + }, + }, + "artifactName": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "templateName": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "nodeId": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "deleted": { + SchemaProps: spec.SchemaProps{ + Type: []string{"boolean"}, + Format: "", + }, + }, + "nodeTypes": { + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + 
AdditionalProperties: &spec.SchemaOrBool{ + Allows: true, + Schema: &spec.Schema{ + SchemaProps: spec.SchemaProps{ + Default: false, + Type: []string{"boolean"}, + Format: "", + }, + }, + }, + }, + }, + }, + }, + }, + } +} + +func schema_pkg_apis_workflow_v1alpha1_ArtifactSearchResult(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "Artifact": { + SchemaProps: spec.SchemaProps{ + Default: map[string]interface{}{}, + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Artifact"), + }, + }, + "NodeID": { + SchemaProps: spec.SchemaProps{ + Default: "", + Type: []string{"string"}, + Format: "", + }, + }, + }, + Required: []string{"Artifact", "NodeID"}, + }, + }, + Dependencies: []string{ + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Artifact"}, + } +} + func schema_pkg_apis_workflow_v1alpha1_ArtifactoryArtifact(ref common.ReferenceCallback) common.OpenAPIDefinition { return common.OpenAPIDefinition{ Schema: spec.Schema{ @@ -815,6 +1229,156 @@ func schema_pkg_apis_workflow_v1alpha1_ArtifactoryAuth(ref common.ReferenceCallb } } +func schema_pkg_apis_workflow_v1alpha1_AzureArtifact(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "AzureArtifact is the location of a an Azure Storage artifact", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "endpoint": { + SchemaProps: spec.SchemaProps{ + Description: "Endpoint is the service url associated with an account. 
It is most likely \"https://.blob.core.windows.net\"", + Default: "", + Type: []string{"string"}, + Format: "", + }, + }, + "container": { + SchemaProps: spec.SchemaProps{ + Description: "Container is the container where resources will be stored", + Default: "", + Type: []string{"string"}, + Format: "", + }, + }, + "accountKeySecret": { + SchemaProps: spec.SchemaProps{ + Description: "AccountKeySecret is the secret selector to the Azure Blob Storage account access key", + Ref: ref("k8s.io/api/core/v1.SecretKeySelector"), + }, + }, + "useSDKCreds": { + SchemaProps: spec.SchemaProps{ + Description: "UseSDKCreds tells the driver to figure out credentials based on sdk defaults.", + Type: []string{"boolean"}, + Format: "", + }, + }, + "blob": { + SchemaProps: spec.SchemaProps{ + Description: "Blob is the blob name (i.e., path) in the container where the artifact resides", + Default: "", + Type: []string{"string"}, + Format: "", + }, + }, + }, + Required: []string{"endpoint", "container", "blob"}, + }, + }, + Dependencies: []string{ + "k8s.io/api/core/v1.SecretKeySelector"}, + } +} + +func schema_pkg_apis_workflow_v1alpha1_AzureArtifactRepository(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "AzureArtifactRepository defines the controller configuration for an Azure Blob Storage artifact repository", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "endpoint": { + SchemaProps: spec.SchemaProps{ + Description: "Endpoint is the service url associated with an account. 
It is most likely \"https://.blob.core.windows.net\"", + Default: "", + Type: []string{"string"}, + Format: "", + }, + }, + "container": { + SchemaProps: spec.SchemaProps{ + Description: "Container is the container where resources will be stored", + Default: "", + Type: []string{"string"}, + Format: "", + }, + }, + "accountKeySecret": { + SchemaProps: spec.SchemaProps{ + Description: "AccountKeySecret is the secret selector to the Azure Blob Storage account access key", + Ref: ref("k8s.io/api/core/v1.SecretKeySelector"), + }, + }, + "useSDKCreds": { + SchemaProps: spec.SchemaProps{ + Description: "UseSDKCreds tells the driver to figure out credentials based on sdk defaults.", + Type: []string{"boolean"}, + Format: "", + }, + }, + "blobNameFormat": { + SchemaProps: spec.SchemaProps{ + Description: "BlobNameFormat is defines the format of how to store blob names. Can reference workflow variables", + Type: []string{"string"}, + Format: "", + }, + }, + }, + Required: []string{"endpoint", "container"}, + }, + }, + Dependencies: []string{ + "k8s.io/api/core/v1.SecretKeySelector"}, + } +} + +func schema_pkg_apis_workflow_v1alpha1_AzureBlobContainer(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "AzureBlobContainer contains the access information for interfacing with an Azure Blob Storage container", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "endpoint": { + SchemaProps: spec.SchemaProps{ + Description: "Endpoint is the service url associated with an account. 
It is most likely \"https://.blob.core.windows.net\"", + Default: "", + Type: []string{"string"}, + Format: "", + }, + }, + "container": { + SchemaProps: spec.SchemaProps{ + Description: "Container is the container where resources will be stored", + Default: "", + Type: []string{"string"}, + Format: "", + }, + }, + "accountKeySecret": { + SchemaProps: spec.SchemaProps{ + Description: "AccountKeySecret is the secret selector to the Azure Blob Storage account access key", + Ref: ref("k8s.io/api/core/v1.SecretKeySelector"), + }, + }, + "useSDKCreds": { + SchemaProps: spec.SchemaProps{ + Description: "UseSDKCreds tells the driver to figure out credentials based on sdk defaults.", + Type: []string{"boolean"}, + Format: "", + }, + }, + }, + Required: []string{"endpoint", "container"}, + }, + }, + Dependencies: []string{ + "k8s.io/api/core/v1.SecretKeySelector"}, + } +} + func schema_pkg_apis_workflow_v1alpha1_Backoff(ref common.ReferenceCallback) common.OpenAPIDefinition { return common.OpenAPIDefinition{ Schema: spec.Schema{ @@ -850,6 +1414,33 @@ func schema_pkg_apis_workflow_v1alpha1_Backoff(ref common.ReferenceCallback) com } } +func schema_pkg_apis_workflow_v1alpha1_BasicAuth(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "BasicAuth describes the secret selectors required for basic authentication", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "usernameSecret": { + SchemaProps: spec.SchemaProps{ + Description: "UsernameSecret is the secret selector to the repository username", + Ref: ref("k8s.io/api/core/v1.SecretKeySelector"), + }, + }, + "passwordSecret": { + SchemaProps: spec.SchemaProps{ + Description: "PasswordSecret is the secret selector to the repository password", + Ref: ref("k8s.io/api/core/v1.SecretKeySelector"), + }, + }, + }, + }, + }, + Dependencies: []string{ + "k8s.io/api/core/v1.SecretKeySelector"}, + } +} + func 
schema_pkg_apis_workflow_v1alpha1_Cache(ref common.ReferenceCallback) common.OpenAPIDefinition { return common.OpenAPIDefinition{ Schema: spec.Schema{ @@ -857,18 +1448,43 @@ func schema_pkg_apis_workflow_v1alpha1_Cache(ref common.ReferenceCallback) commo Description: "Cache is the configuration for the type of cache to be used", Type: []string{"object"}, Properties: map[string]spec.Schema{ - "configMap": { + "configMap": { + SchemaProps: spec.SchemaProps{ + Description: "ConfigMap sets a ConfigMap-based cache", + Ref: ref("k8s.io/api/core/v1.ConfigMapKeySelector"), + }, + }, + }, + Required: []string{"configMap"}, + }, + }, + Dependencies: []string{ + "k8s.io/api/core/v1.ConfigMapKeySelector"}, + } +} + +func schema_pkg_apis_workflow_v1alpha1_ClientCertAuth(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "ClientCertAuth holds necessary information for client authentication via certificates", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "clientCertSecret": { SchemaProps: spec.SchemaProps{ - Description: "ConfigMap sets a ConfigMap-based cache", - Ref: ref("k8s.io/api/core/v1.ConfigMapKeySelector"), + Ref: ref("k8s.io/api/core/v1.SecretKeySelector"), + }, + }, + "clientKeySecret": { + SchemaProps: spec.SchemaProps{ + Ref: ref("k8s.io/api/core/v1.SecretKeySelector"), }, }, }, - Required: []string{"configMap"}, }, }, Dependencies: []string{ - "k8s.io/api/core/v1.ConfigMapKeySelector"}, + "k8s.io/api/core/v1.SecretKeySelector"}, } } @@ -1012,14 +1628,14 @@ func schema_pkg_apis_workflow_v1alpha1_ContainerNode(ref common.ReferenceCallbac }, "image": { SchemaProps: spec.SchemaProps{ - Description: "Docker image name. 
More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", + Description: "Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", Type: []string{"string"}, Format: "", }, }, "command": { SchemaProps: spec.SchemaProps{ - Description: "Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + Description: "Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", Type: []string{"array"}, Items: &spec.SchemaOrArray{ Schema: &spec.Schema{ @@ -1034,7 +1650,7 @@ func schema_pkg_apis_workflow_v1alpha1_ContainerNode(ref common.ReferenceCallbac }, "args": { SchemaProps: spec.SchemaProps{ - Description: "Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + Description: "Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", Type: []string{"array"}, Items: &spec.SchemaOrArray{ Schema: &spec.Schema{ @@ -2111,6 +2727,20 @@ func schema_pkg_apis_workflow_v1alpha1_GitArtifact(ref common.ReferenceCallback) Format: "", }, }, + "singleBranch": { + SchemaProps: spec.SchemaProps{ + Description: "SingleBranch enables single branch clone, using the `branch` parameter", + Type: []string{"boolean"}, + Format: "", + }, + }, + "branch": { + SchemaProps: spec.SchemaProps{ + Description: "Branch is the branch to fetch when `SingleBranch` is enabled", + Type: []string{"string"}, + Format: "", + }, + }, }, Required: []string{"repo"}, }, @@ -2488,9 +3118,15 @@ func schema_pkg_apis_workflow_v1alpha1_HTTP(ref common.ReferenceCallback) common Format: "", }, }, + "bodyFrom": { + SchemaProps: spec.SchemaProps{ + Description: "BodyFrom is content of the HTTP Request as Bytes", + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTPBodySource"), + }, + }, "insecureSkipVerify": { SchemaProps: spec.SchemaProps{ - Description: "insecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client", + Description: "InsecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client", Type: []string{"boolean"}, Format: "", }, @@ -2500,7 +3136,7 @@ func schema_pkg_apis_workflow_v1alpha1_HTTP(ref common.ReferenceCallback) common }, }, Dependencies: []string{ - "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTPHeader"}, + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTPBodySource", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTPHeader"}, } } @@ -2508,7 +3144,7 @@ func schema_pkg_apis_workflow_v1alpha1_HTTPArtifact(ref common.ReferenceCallback return common.OpenAPIDefinition{ Schema: spec.Schema{ SchemaProps: spec.SchemaProps{ - Description: 
"HTTPArtifact allows an file served on HTTP to be placed as an input artifact in a container", + Description: "HTTPArtifact allows a file served on HTTP to be placed as an input artifact in a container", Type: []string{"object"}, Properties: map[string]spec.Schema{ "url": { @@ -2533,12 +3169,69 @@ func schema_pkg_apis_workflow_v1alpha1_HTTPArtifact(ref common.ReferenceCallback }, }, }, + "auth": { + SchemaProps: spec.SchemaProps{ + Description: "Auth contains information for client authentication", + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTPAuth"), + }, + }, }, Required: []string{"url"}, }, }, Dependencies: []string{ - "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Header"}, + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.HTTPAuth", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Header"}, + } +} + +func schema_pkg_apis_workflow_v1alpha1_HTTPAuth(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "clientCert": { + SchemaProps: spec.SchemaProps{ + Default: map[string]interface{}{}, + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ClientCertAuth"), + }, + }, + "oauth2": { + SchemaProps: spec.SchemaProps{ + Default: map[string]interface{}{}, + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.OAuth2Auth"), + }, + }, + "basicAuth": { + SchemaProps: spec.SchemaProps{ + Default: map[string]interface{}{}, + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.BasicAuth"), + }, + }, + }, + }, + }, + Dependencies: []string{ + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.BasicAuth", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ClientCertAuth", 
"github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.OAuth2Auth"}, + } +} + +func schema_pkg_apis_workflow_v1alpha1_HTTPBodySource(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "HTTPBodySource contains the source of the HTTP body.", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "bytes": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "byte", + }, + }, + }, + }, + }, } } @@ -2812,7 +3505,6 @@ func schema_pkg_apis_workflow_v1alpha1_LifecycleHook(ref common.ReferenceCallbac "template": { SchemaProps: spec.SchemaProps{ Description: "Template is the name of the template to execute by the hook", - Default: "", Type: []string{"string"}, Format: "", }, @@ -2838,7 +3530,6 @@ func schema_pkg_apis_workflow_v1alpha1_LifecycleHook(ref common.ReferenceCallbac }, }, }, - Required: []string{"template"}, }, }, Dependencies: []string{ @@ -2890,6 +3581,27 @@ func schema_pkg_apis_workflow_v1alpha1_Link(ref common.ReferenceCallback) common } } +func schema_pkg_apis_workflow_v1alpha1_ManifestFrom(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "artifact": { + SchemaProps: spec.SchemaProps{ + Description: "Artifact contains the artifact to use", + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Artifact"), + }, + }, + }, + Required: []string{"artifact"}, + }, + }, + Dependencies: []string{ + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Artifact"}, + } +} + func schema_pkg_apis_workflow_v1alpha1_MemoizationStatus(ref common.ReferenceCallback) common.OpenAPIDefinition { return common.OpenAPIDefinition{ Schema: spec.Schema{ @@ -3441,6 +4153,92 @@ func schema_pkg_apis_workflow_v1alpha1_NoneStrategy(ref 
common.ReferenceCallback } } +func schema_pkg_apis_workflow_v1alpha1_OAuth2Auth(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "OAuth2Auth holds all information for client authentication via OAuth2 tokens", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "clientIDSecret": { + SchemaProps: spec.SchemaProps{ + Ref: ref("k8s.io/api/core/v1.SecretKeySelector"), + }, + }, + "clientSecretSecret": { + SchemaProps: spec.SchemaProps{ + Ref: ref("k8s.io/api/core/v1.SecretKeySelector"), + }, + }, + "tokenURLSecret": { + SchemaProps: spec.SchemaProps{ + Ref: ref("k8s.io/api/core/v1.SecretKeySelector"), + }, + }, + "scopes": { + SchemaProps: spec.SchemaProps{ + Type: []string{"array"}, + Items: &spec.SchemaOrArray{ + Schema: &spec.Schema{ + SchemaProps: spec.SchemaProps{ + Default: "", + Type: []string{"string"}, + Format: "", + }, + }, + }, + }, + }, + "endpointParams": { + SchemaProps: spec.SchemaProps{ + Type: []string{"array"}, + Items: &spec.SchemaOrArray{ + Schema: &spec.Schema{ + SchemaProps: spec.SchemaProps{ + Default: map[string]interface{}{}, + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.OAuth2EndpointParam"), + }, + }, + }, + }, + }, + }, + }, + }, + Dependencies: []string{ + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.OAuth2EndpointParam", "k8s.io/api/core/v1.SecretKeySelector"}, + } +} + +func schema_pkg_apis_workflow_v1alpha1_OAuth2EndpointParam(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "EndpointParam is for requesting optional fields that should be sent in the oauth request", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "key": { + SchemaProps: spec.SchemaProps{ + Description: "Name is the header name", + Default: "", + Type: 
[]string{"string"}, + Format: "", + }, + }, + "value": { + SchemaProps: spec.SchemaProps{ + Description: "Value is the literal value to use for the header", + Type: []string{"string"}, + Format: "", + }, + }, + }, + Required: []string{"key"}, + }, + }, + } +} + func schema_pkg_apis_workflow_v1alpha1_OSSArtifact(ref common.ReferenceCallback) common.OpenAPIDefinition { return common.OpenAPIDefinition{ Schema: spec.Schema{ @@ -3991,6 +4789,12 @@ func schema_pkg_apis_workflow_v1alpha1_ResourceTemplate(ref common.ReferenceCall Format: "", }, }, + "manifestFrom": { + SchemaProps: spec.SchemaProps{ + Description: "ManifestFrom is the source for a single kubernetes manifest", + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ManifestFrom"), + }, + }, "setOwnerReference": { SchemaProps: spec.SchemaProps{ Description: "SetOwnerReference sets the reference to the workflow on the OwnerReference of generated resource.", @@ -4031,6 +4835,8 @@ func schema_pkg_apis_workflow_v1alpha1_ResourceTemplate(ref common.ReferenceCall Required: []string{"action"}, }, }, + Dependencies: []string{ + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ManifestFrom"}, } } @@ -4074,7 +4880,7 @@ func schema_pkg_apis_workflow_v1alpha1_RetryStrategy(ref common.ReferenceCallbac Properties: map[string]spec.Schema{ "limit": { SchemaProps: spec.SchemaProps{ - Description: "Limit is the maximum number of attempts when retrying a container", + Description: "Limit is the maximum number of retry attempts when retrying a container. It does not include the original container; the maximum number of total attempts will be `limit + 1`.", Ref: ref("k8s.io/apimachinery/pkg/util/intstr.IntOrString"), }, }, @@ -4432,14 +5238,14 @@ func schema_pkg_apis_workflow_v1alpha1_ScriptTemplate(ref common.ReferenceCallba }, "image": { SchemaProps: spec.SchemaProps{ - Description: "Docker image name. 
More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", + Description: "Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", Type: []string{"string"}, Format: "", }, }, "command": { SchemaProps: spec.SchemaProps{ - Description: "Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + Description: "Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", Type: []string{"array"}, Items: &spec.SchemaOrArray{ Schema: &spec.Schema{ @@ -4454,7 +5260,7 @@ func schema_pkg_apis_workflow_v1alpha1_ScriptTemplate(ref common.ReferenceCallba }, "args": { SchemaProps: spec.SchemaProps{ - Description: "Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + Description: "Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", Type: []string{"array"}, Items: &spec.SchemaOrArray{ Schema: &spec.Schema{ @@ -5514,14 +6320,14 @@ func schema_pkg_apis_workflow_v1alpha1_UserContainer(ref common.ReferenceCallbac }, "image": { SchemaProps: spec.SchemaProps{ - Description: "Docker image name. 
More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", + Description: "Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", Type: []string{"string"}, Format: "", }, }, "command": { SchemaProps: spec.SchemaProps{ - Description: "Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + Description: "Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", Type: []string{"array"}, Items: &spec.SchemaOrArray{ Schema: &spec.Schema{ @@ -5536,7 +6342,7 @@ func schema_pkg_apis_workflow_v1alpha1_UserContainer(ref common.ReferenceCallbac }, "args": { SchemaProps: spec.SchemaProps{ - Description: "Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", + Description: "Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell", Type: []string{"array"}, Items: &spec.SchemaOrArray{ Schema: &spec.Schema{ @@ -5963,6 +6769,103 @@ func schema_pkg_apis_workflow_v1alpha1_Workflow(ref common.ReferenceCallback) co } } +func schema_pkg_apis_workflow_v1alpha1_WorkflowArtifactGCTask(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "WorkflowArtifactGCTask specifies the Artifacts that need to be deleted as well as the status of deletion", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "kind": { + SchemaProps: spec.SchemaProps{ + Description: "Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds", + Type: []string{"string"}, + Format: "", + }, + }, + "apiVersion": { + SchemaProps: spec.SchemaProps{ + Description: "APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. 
More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources", + Type: []string{"string"}, + Format: "", + }, + }, + "metadata": { + SchemaProps: spec.SchemaProps{ + Default: map[string]interface{}{}, + Ref: ref("k8s.io/apimachinery/pkg/apis/meta/v1.ObjectMeta"), + }, + }, + "spec": { + SchemaProps: spec.SchemaProps{ + Default: map[string]interface{}{}, + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactGCSpec"), + }, + }, + "status": { + SchemaProps: spec.SchemaProps{ + Default: map[string]interface{}{}, + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactGCStatus"), + }, + }, + }, + Required: []string{"metadata", "spec"}, + }, + }, + Dependencies: []string{ + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactGCSpec", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactGCStatus", "k8s.io/apimachinery/pkg/apis/meta/v1.ObjectMeta"}, + } +} + +func schema_pkg_apis_workflow_v1alpha1_WorkflowArtifactGCTaskList(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "WorkflowArtifactGCTaskList is list of WorkflowArtifactGCTask resources", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "kind": { + SchemaProps: spec.SchemaProps{ + Description: "Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds", + Type: []string{"string"}, + Format: "", + }, + }, + "apiVersion": { + SchemaProps: spec.SchemaProps{ + Description: "APIVersion defines the versioned schema of this representation of an object. 
Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources", + Type: []string{"string"}, + Format: "", + }, + }, + "metadata": { + SchemaProps: spec.SchemaProps{ + Default: map[string]interface{}{}, + Ref: ref("k8s.io/apimachinery/pkg/apis/meta/v1.ListMeta"), + }, + }, + "items": { + SchemaProps: spec.SchemaProps{ + Type: []string{"array"}, + Items: &spec.SchemaOrArray{ + Schema: &spec.Schema{ + SchemaProps: spec.SchemaProps{ + Default: map[string]interface{}{}, + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.WorkflowArtifactGCTask"), + }, + }, + }, + }, + }, + }, + Required: []string{"metadata", "items"}, + }, + }, + Dependencies: []string{ + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.WorkflowArtifactGCTask", "k8s.io/apimachinery/pkg/apis/meta/v1.ListMeta"}, + } +} + func schema_pkg_apis_workflow_v1alpha1_WorkflowEventBinding(ref common.ReferenceCallback) common.OpenAPIDefinition { return common.OpenAPIDefinition{ Schema: spec.Schema{ @@ -6428,7 +7331,7 @@ func schema_pkg_apis_workflow_v1alpha1_WorkflowSpec(ref common.ReferenceCallback }, "podGC": { SchemaProps: spec.SchemaProps{ - Description: "PodGC describes the strategy to use when to deleting completed pods", + Description: "PodGC describes the strategy to use when deleting completed pods", Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.PodGC"), }, }, @@ -6441,7 +7344,7 @@ func schema_pkg_apis_workflow_v1alpha1_WorkflowSpec(ref common.ReferenceCallback }, "podPriority": { SchemaProps: spec.SchemaProps{ - Description: "Priority to apply to workflow pods.", + Description: "Priority to apply to workflow pods. 
DEPRECATED: Use PodPriorityClassName instead.", Type: []string{"integer"}, Format: "int32", }, @@ -6511,7 +7414,7 @@ func schema_pkg_apis_workflow_v1alpha1_WorkflowSpec(ref common.ReferenceCallback }, "volumeClaimGC": { SchemaProps: spec.SchemaProps{ - Description: "VolumeClaimGC describes the strategy to use when to deleting volumes from completed workflows", + Description: "VolumeClaimGC describes the strategy to use when deleting volumes from completed workflows", Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.VolumeClaimGC"), }, }, @@ -6557,15 +7460,21 @@ func schema_pkg_apis_workflow_v1alpha1_WorkflowSpec(ref common.ReferenceCallback }, "workflowMetadata": { SchemaProps: spec.SchemaProps{ - Description: "WorkflowMetadata contains some metadata of the workflow to be refer", + Description: "WorkflowMetadata contains some metadata of the workflow to refer to", Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.WorkflowMetadata"), }, }, + "artifactGC": { + SchemaProps: spec.SchemaProps{ + Description: "ArtifactGC describes the strategy to use when deleting artifacts from completed or deleted workflows (applies to all output Artifacts unless Artifact.ArtifactGC is specified, which overrides this)", + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactGC"), + }, + }, }, }, }, Dependencies: []string{ - "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Arguments", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactRepositoryRef", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ExecutorConfig", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.LifecycleHook", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Metadata", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Metrics", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.PodGC", 
"github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.RetryStrategy", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Synchronization", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.TTLStrategy", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Template", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.VolumeClaimGC", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.WorkflowMetadata", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.WorkflowTemplateRef", "k8s.io/api/core/v1.Affinity", "k8s.io/api/core/v1.HostAlias", "k8s.io/api/core/v1.LocalObjectReference", "k8s.io/api/core/v1.PersistentVolumeClaim", "k8s.io/api/core/v1.PodDNSConfig", "k8s.io/api/core/v1.PodSecurityContext", "k8s.io/api/core/v1.Toleration", "k8s.io/api/core/v1.Volume", "k8s.io/api/policy/v1beta1.PodDisruptionBudgetSpec"}, + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Arguments", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactGC", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactRepositoryRef", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ExecutorConfig", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.LifecycleHook", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Metadata", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Metrics", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.PodGC", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.RetryStrategy", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Synchronization", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.TTLStrategy", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Template", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.VolumeClaimGC", 
"github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.WorkflowMetadata", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.WorkflowTemplateRef", "k8s.io/api/core/v1.Affinity", "k8s.io/api/core/v1.HostAlias", "k8s.io/api/core/v1.LocalObjectReference", "k8s.io/api/core/v1.PersistentVolumeClaim", "k8s.io/api/core/v1.PodDNSConfig", "k8s.io/api/core/v1.PodSecurityContext", "k8s.io/api/core/v1.Toleration", "k8s.io/api/core/v1.Volume", "k8s.io/api/policy/v1beta1.PodDisruptionBudgetSpec"}, } } @@ -6730,11 +7639,17 @@ func schema_pkg_apis_workflow_v1alpha1_WorkflowStatus(ref common.ReferenceCallba Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactRepositoryRefStatus"), }, }, + "artifactGCStatus": { + SchemaProps: spec.SchemaProps{ + Description: "ArtifactGCStatus maintains the status of Artifact Garbage Collection", + Ref: ref("github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtGCStatus"), + }, + }, }, }, }, Dependencies: []string{ - "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactRepositoryRefStatus", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Condition", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.NodeStatus", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Outputs", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.SynchronizationStatus", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Template", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.WorkflowSpec", "k8s.io/api/core/v1.Volume", "k8s.io/apimachinery/pkg/apis/meta/v1.Time"}, + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtGCStatus", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.ArtifactRepositoryRefStatus", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Condition", 
"github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.NodeStatus", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Outputs", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.SynchronizationStatus", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.Template", "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1.WorkflowSpec", "k8s.io/api/core/v1.Volume", "k8s.io/apimachinery/pkg/apis/meta/v1.Time"}, } } diff --git a/pkg/apis/workflow/v1alpha1/register.go b/pkg/apis/workflow/v1alpha1/register.go index 371cfb101da2..b4d6738f46a2 100644 --- a/pkg/apis/workflow/v1alpha1/register.go +++ b/pkg/apis/workflow/v1alpha1/register.go @@ -44,8 +44,12 @@ func addKnownTypes(scheme *runtime.Scheme) error { &ClusterWorkflowTemplateList{}, &WorkflowTaskSet{}, &WorkflowTaskSetList{}, + &WorkflowArtifactGCTask{}, + &WorkflowArtifactGCTaskList{}, &WorkflowTaskResult{}, &WorkflowTaskResultList{}, + &WorkflowArtifactGCTask{}, + &WorkflowArtifactGCTaskList{}, ) metav1.AddToGroupVersion(scheme, SchemeGroupVersion) return nil diff --git a/pkg/apis/workflow/v1alpha1/workflow_phase.go b/pkg/apis/workflow/v1alpha1/workflow_phase.go index cb2e62b39d7c..4027b10dca6a 100644 --- a/pkg/apis/workflow/v1alpha1/workflow_phase.go +++ b/pkg/apis/workflow/v1alpha1/workflow_phase.go @@ -8,7 +8,7 @@ const ( WorkflowPending WorkflowPhase = "Pending" // pending some set-up - rarely used WorkflowRunning WorkflowPhase = "Running" // any node has started; pods might not be running yet, the workflow maybe suspended too WorkflowSucceeded WorkflowPhase = "Succeeded" - WorkflowFailed WorkflowPhase = "Failed" // it maybe that the the workflow was terminated + WorkflowFailed WorkflowPhase = "Failed" // it maybe that the workflow was terminated WorkflowError WorkflowPhase = "Error" ) diff --git a/pkg/apis/workflow/v1alpha1/workflow_types.go b/pkg/apis/workflow/v1alpha1/workflow_types.go index 76743cb58880..76340b563e9c 100644 --- 
a/pkg/apis/workflow/v1alpha1/workflow_types.go +++ b/pkg/apis/workflow/v1alpha1/workflow_types.go @@ -83,6 +83,22 @@ const ( NodeTypePlugin NodeType = "Plugin" ) +// ArtifactGCStrategy is the strategy when to delete artifacts for GC. +type ArtifactGCStrategy string + +// ArtifactGCStrategy +const ( + ArtifactGCOnWorkflowCompletion ArtifactGCStrategy = "OnWorkflowCompletion" + ArtifactGCOnWorkflowDeletion ArtifactGCStrategy = "OnWorkflowDeletion" + ArtifactGCNever ArtifactGCStrategy = "Never" + ArtifactGCStrategyUndefined ArtifactGCStrategy = "" +) + +var AnyArtifactGCStrategy = map[ArtifactGCStrategy]bool{ + ArtifactGCOnWorkflowCompletion: true, + ArtifactGCOnWorkflowDeletion: true, +} + // PodGCStrategy is the strategy when to delete completed pods for GC. type PodGCStrategy string @@ -121,6 +137,7 @@ const ( // +kubebuilder:resource:shortName=wf // +kubebuilder:printcolumn:name="Status",type="string",JSONPath=".status.phase",description="Status of the workflow" // +kubebuilder:printcolumn:name="Age",type="date",format="date-time",JSONPath=".status.startedAt",description="When the workflow was started" +// +kubebuilder:printcolumn:name="Message",type="string",JSONPath=".status.message",description="Human readable message indicating details about why the workflow is in this condition." 
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object type Workflow struct { metav1.TypeMeta `json:",inline"` @@ -185,6 +202,38 @@ func (w *Workflow) GetExecSpec() *WorkflowSpec { return &w.Spec } +func (w *Workflow) HasArtifactGC() bool { + + if w.Spec.ArtifactGC != nil && w.Spec.ArtifactGC.Strategy != ArtifactGCNever && w.Spec.ArtifactGC.Strategy != ArtifactGCStrategyUndefined { + return true + } + + // either it's defined by an Output Artifact or by the WorkflowSpec itself, or both + for _, template := range w.GetTemplates() { + for _, artifact := range template.Outputs.Artifacts { + if artifact.GetArtifactGC().Strategy != ArtifactGCNever && artifact.GetArtifactGC().Strategy != ArtifactGCStrategyUndefined { + return true + } + } + } + return false +} + +// return the ultimate ArtifactGCStrategy for the Artifact +// (defined on the Workflow level but can be overridden on the Artifact level) +func (w *Workflow) GetArtifactGCStrategy(a *Artifact) ArtifactGCStrategy { + artifactStrategy := a.GetArtifactGC().GetStrategy() + wfStrategy := w.Spec.GetArtifactGC().GetStrategy() + strategy := wfStrategy + if artifactStrategy != ArtifactGCStrategyUndefined { + strategy = artifactStrategy + } + if strategy == ArtifactGCStrategyUndefined { + return ArtifactGCNever + } + return strategy +} + var ( WorkflowCreatedAfter = func(t time.Time) WorkflowPredicate { return func(wf Workflow) bool { @@ -331,13 +380,14 @@ type WorkflowSpec struct { // +optional SchedulerName string `json:"schedulerName,omitempty" protobuf:"bytes,21,opt,name=schedulerName"` - // PodGC describes the strategy to use when to deleting completed pods + // PodGC describes the strategy to use when deleting completed pods PodGC *PodGC `json:"podGC,omitempty" protobuf:"bytes,22,opt,name=podGC"` // PriorityClassName to apply to workflow pods. PodPriorityClassName string `json:"podPriorityClassName,omitempty" protobuf:"bytes,23,opt,name=podPriorityClassName"` // Priority to apply to workflow pods. 
+ // DEPRECATED: Use PodPriorityClassName instead. PodPriority *int32 `json:"podPriority,omitempty" protobuf:"bytes,24,opt,name=podPriority"` // +patchStrategy=merge @@ -371,7 +421,7 @@ type WorkflowSpec struct { // Synchronization holds synchronization lock configuration for this Workflow Synchronization *Synchronization `json:"synchronization,omitempty" protobuf:"bytes,35,opt,name=synchronization,casttype=Synchronization"` - // VolumeClaimGC describes the strategy to use when to deleting volumes from completed workflows + // VolumeClaimGC describes the strategy to use when deleting volumes from completed workflows VolumeClaimGC *VolumeClaimGC `json:"volumeClaimGC,omitempty" protobuf:"bytes,36,opt,name=volumeClaimGC,casttype=VolumeClaimGC"` // RetryStrategy for all templates in the workflow. @@ -390,8 +440,12 @@ type WorkflowSpec struct { // step, irrespective of the success, failure, or error status of the primary step Hooks LifecycleHooks `json:"hooks,omitempty" protobuf:"bytes,41,opt,name=hooks"` - // WorkflowMetadata contains some metadata of the workflow to be refer + // WorkflowMetadata contains some metadata of the workflow to refer to WorkflowMetadata *WorkflowMetadata `json:"workflowMetadata,omitempty" protobuf:"bytes,42,opt,name=workflowMetadata"` + + // ArtifactGC describes the strategy to use when deleting artifacts from completed or deleted workflows (applies to all output Artifacts + // unless Artifact.ArtifactGC is specified, which overrides this) + ArtifactGC *ArtifactGC `json:"artifactGC,omitempty" protobuf:"bytes,43,opt,name=artifactGC"` } type LabelValueFrom struct { @@ -433,6 +487,15 @@ func (wfs WorkflowSpec) GetVolumeClaimGC() *VolumeClaimGC { return wfs.VolumeClaimGC } +// ArtifactGC returns the ArtifactGC that was defined in the workflow spec. If none was provided, a default value is returned. 
+func (wfs WorkflowSpec) GetArtifactGC() *ArtifactGC { + if wfs.ArtifactGC == nil { + return &ArtifactGC{Strategy: ArtifactGCStrategyUndefined} + } + + return wfs.ArtifactGC +} + func (wfs WorkflowSpec) GetTTLStrategy() *TTLStrategy { return wfs.TTLStrategy } @@ -762,6 +825,13 @@ func (tmpl *Template) HasParallelism() bool { return tmpl.Parallelism != nil && *tmpl.Parallelism > 0 } +func (tmpl *Template) GetOutputs() *Outputs { + if tmpl != nil { + return &tmpl.Outputs + } + return nil +} + type Artifacts []Artifact func (a Artifacts) GetArtifactByName(name string) *Artifact { @@ -906,6 +976,21 @@ type Artifact struct { // FromExpression, if defined, is evaluated to specify the value for the artifact FromExpression string `json:"fromExpression,omitempty" protobuf:"bytes,11,opt,name=fromExpression"` + + // ArtifactGC describes the strategy to use when to deleting an artifact from completed or deleted workflows + ArtifactGC *ArtifactGC `json:"artifactGC,omitempty" protobuf:"bytes,12,opt,name=artifactGC"` + + // Has this been deleted? + Deleted bool `json:"deleted,omitempty" protobuf:"varint,13,opt,name=deleted"` +} + +// ArtifactGC returns the ArtifactGC that was defined by the artifact. If none was provided, a default value is returned. +func (a *Artifact) GetArtifactGC() *ArtifactGC { + if a.ArtifactGC == nil { + return &ArtifactGC{Strategy: ArtifactGCStrategyUndefined} + } + + return a.ArtifactGC } // CleanPath validates and cleans the artifact path. @@ -965,6 +1050,27 @@ func (podGC *PodGC) GetStrategy() PodGCStrategy { return PodGCOnPodNone } +// ArtifactGC describes how to delete artifacts from completed Workflows +type ArtifactGC struct { + // Strategy is the strategy to use. 
+ // +kubebuilder:validation:Enum="";OnWorkflowCompletion;OnWorkflowDeletion;Never + Strategy ArtifactGCStrategy `json:"strategy,omitempty" protobuf:"bytes,1,opt,name=strategy,casttype=ArtifactGCStategy"` + + // PodMetadata is an optional field for specifying the Labels and Annotations that should be assigned to the Pod doing the deletion + PodMetadata *Metadata `json:"podMetadata,omitempty" protobuf:"bytes,2,opt,name=podMetadata"` + + // ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion + ServiceAccountName string `json:"serviceAccountName,omitempty" protobuf:"bytes,3,opt,name=serviceAccountName"` +} + +// GetStrategy returns the VolumeClaimGCStrategy to use for the workflow +func (agc *ArtifactGC) GetStrategy() ArtifactGCStrategy { + if agc != nil { + return agc.Strategy + } + return ArtifactGCStrategyUndefined +} + // VolumeClaimGC describes how to delete volumes from completed Workflows type VolumeClaimGC struct { // Strategy is the strategy to use. 
One of "OnWorkflowCompletion", "OnWorkflowSuccess" @@ -1039,6 +1145,9 @@ type ArtifactLocation struct { // GCS contains GCS artifact location details GCS *GCSArtifact `json:"gcs,omitempty" protobuf:"bytes,9,opt,name=gcs"` + + // Azure contains Azure Storage artifact location details + Azure *AzureArtifact `json:"azure,omitempty" protobuf:"bytes,10,opt,name=azure"` } func (a *ArtifactLocation) Get() (ArtifactLocationType, error) { @@ -1046,6 +1155,8 @@ func (a *ArtifactLocation) Get() (ArtifactLocationType, error) { return nil, fmt.Errorf("key unsupported: cannot get key for artifact location, because it is invalid") } else if a.Artifactory != nil { return a.Artifactory, nil + } else if a.Azure != nil { + return a.Azure, nil } else if a.Git != nil { return a.Git, nil } else if a.GCS != nil { @@ -1070,6 +1181,8 @@ func (a *ArtifactLocation) SetType(x ArtifactLocationType) error { switch v := x.(type) { case *ArtifactoryArtifact: a.Artifactory = &ArtifactoryArtifact{} + case *AzureArtifact: + a.Azure = &AzureArtifact{} case *GCSArtifact: a.GCS = &GCSArtifact{} case *HDFSArtifact: @@ -1201,6 +1314,127 @@ func (r *ArtifactRepositoryRefStatus) String() string { return fmt.Sprintf("%s/%s", r.Namespace, r.ArtifactRepositoryRef.String()) } +type ArtifactSearchQuery struct { + ArtifactGCStrategies map[ArtifactGCStrategy]bool `json:"artifactGCStrategies,omitempty" protobuf:"bytes,1,rep,name=artifactGCStrategies,castkey=ArtifactGCStrategy"` + ArtifactName string `json:"artifactName,omitempty" protobuf:"bytes,2,rep,name=artifactName"` + TemplateName string `json:"templateName,omitempty" protobuf:"bytes,3,rep,name=templateName"` + NodeId string `json:"nodeId,omitempty" protobuf:"bytes,4,rep,name=nodeId"` + Deleted *bool `json:"deleted,omitempty" protobuf:"varint,5,opt,name=deleted"` + NodeTypes map[NodeType]bool `json:"nodeTypes,omitempty" protobuf:"bytes,6,opt,name=nodeTypes"` +} + +// ArtGCStatus maintains state related to ArtifactGC +type ArtGCStatus struct { + + // have Pods 
been started to perform this strategy? (enables us not to re-process what we've already done) + StrategiesProcessed map[ArtifactGCStrategy]bool `json:"strategiesProcessed,omitempty" protobuf:"bytes,1,opt,name=strategiesProcessed"` + + // have completed Pods been processed? (mapped by Pod name) + // used to prevent re-processing the Status of a Pod more than once + PodsRecouped map[string]bool `json:"podsRecouped,omitempty" protobuf:"bytes,2,opt,name=podsRecouped"` + + // if this is true, we already checked to see if we need to do it and we don't + NotSpecified bool `json:"notSpecified,omitempty" protobuf:"varint,3,opt,name=notSpecified"` +} + +func (gcStatus *ArtGCStatus) SetArtifactGCStrategyProcessed(strategy ArtifactGCStrategy, processed bool) { + if gcStatus.StrategiesProcessed == nil { + gcStatus.StrategiesProcessed = make(map[ArtifactGCStrategy]bool) + } + gcStatus.StrategiesProcessed[strategy] = processed +} + +func (gcStatus *ArtGCStatus) IsArtifactGCStrategyProcessed(strategy ArtifactGCStrategy) bool { + if gcStatus.StrategiesProcessed != nil { + processed := gcStatus.StrategiesProcessed[strategy] + return processed + } + return false +} + +func (gcStatus *ArtGCStatus) SetArtifactGCPodRecouped(podName string, recouped bool) { + if gcStatus.PodsRecouped == nil { + gcStatus.PodsRecouped = make(map[string]bool) + } + gcStatus.PodsRecouped[podName] = recouped +} + +func (gcStatus *ArtGCStatus) IsArtifactGCPodRecouped(podName string) bool { + if gcStatus.PodsRecouped != nil { + recouped := gcStatus.PodsRecouped[podName] + return recouped + } + return false +} + +type ArtifactSearchResult struct { + Artifact `protobuf:"bytes,1,opt,name=artifact"` + NodeID string `protobuf:"bytes,2,opt,name=nodeID"` +} + +type ArtifactSearchResults []ArtifactSearchResult + +func (asr ArtifactSearchResults) GetArtifacts() []Artifact { + artifacts := make([]Artifact, len(asr)) + for i, result := range asr { + artifacts[i] = result.Artifact + } + return artifacts +} + +func 
NewArtifactSearchQuery() *ArtifactSearchQuery { + var q ArtifactSearchQuery + q.ArtifactGCStrategies = make(map[ArtifactGCStrategy]bool) + return &q +} + +func (q *ArtifactSearchQuery) anyArtifactGCStrategy() bool { + for _, val := range q.ArtifactGCStrategies { + if val { + return val + } + } + return false +} + +func (w *Workflow) SearchArtifacts(q *ArtifactSearchQuery) ArtifactSearchResults { + + var results ArtifactSearchResults + + for _, n := range w.Status.Nodes { + if q.TemplateName != "" && n.TemplateName != q.TemplateName { + continue + } + if q.NodeId != "" && n.ID != q.NodeId { + continue + } + if q.NodeTypes != nil && !q.NodeTypes[n.Type] { + continue + } + for _, a := range n.GetOutputs().GetArtifacts() { + match := true + if q.anyArtifactGCStrategy() { + // artifact strategy is either based on overall Workflow ArtifactGC Strategy, or + // if it's specified on the individual artifact level that takes priority + artifactStrategy := w.GetArtifactGCStrategy(&a) + if !q.ArtifactGCStrategies[artifactStrategy] { + match = false + } + } + if q.ArtifactName != "" && a.Name != q.ArtifactName { + match = false + } + if q.Deleted != nil && a.Deleted != *q.Deleted { + match = false + } + if match { + results = append(results, ArtifactSearchResult{Artifact: a, NodeID: n.ID}) + } + } + } + return results +} + // Outputs hold parameters, artifacts, and results from a step type Outputs struct { // Parameters holds the list of output parameters produced by a step @@ -1220,6 +1454,13 @@ type Outputs struct { ExitCode *string `json:"exitCode,omitempty" protobuf:"bytes,4,opt,name=exitCode"` } +func (o *Outputs) GetArtifacts() Artifacts { + if o == nil { + return nil + } + return o.Artifacts +} + // WorkflowStep is a reference to a template to execute in a series of step type WorkflowStep struct { // Name of the step @@ -1265,6 +1506,17 @@ type WorkflowStep struct { Hooks LifecycleHooks `json:"hooks,omitempty" protobuf:"bytes,12,opt,name=hooks"` } +func (step 
*WorkflowStep) GetName() string { + return step.Name +} + +func (step *WorkflowStep) IsDAGTask() bool { + return false +} +func (step *WorkflowStep) IsWorkflowStep() bool { + return true +} + type LifecycleEvent string const ( @@ -1287,7 +1539,7 @@ func (lchs LifecycleHooks) HasExitHook() bool { type LifecycleHook struct { // Template is the name of the template to execute by the hook - Template string `json:"template," protobuf:"bytes,1,opt,name=template"` + Template string `json:"template,omitempty" protobuf:"bytes,1,opt,name=template"` // Arguments hold arguments to the template Arguments Arguments `json:"arguments,omitempty" protobuf:"bytes,2,opt,name=arguments"` // TemplateRef is the reference to the template resource to execute by the hook @@ -1452,6 +1704,10 @@ func (n Nodes) FindByDisplayName(name string) *NodeStatus { return n.Find(NodeWithDisplayName(name)) } +func (n Nodes) FindByName(name string) *NodeStatus { + return n.Find(NodeWithName(name)) +} + func (in Nodes) Any(f func(NodeStatus) bool) bool { return in.Find(f) != nil } @@ -1465,6 +1721,10 @@ func (n Nodes) Find(f func(NodeStatus) bool) *NodeStatus { return nil } +func NodeWithName(name string) func(n NodeStatus) bool { + return func(n NodeStatus) bool { return n.Name == name } +} + func NodeWithDisplayName(name string) func(n NodeStatus) bool { return func(n NodeStatus) bool { return n.DisplayName == name } } @@ -1577,6 +1837,9 @@ type WorkflowStatus struct { // ArtifactRepositoryRef is used to cache the repository to use so we do not need to determine it everytime we reconcile. 
ArtifactRepositoryRef *ArtifactRepositoryRefStatus `json:"artifactRepositoryRef,omitempty" protobuf:"bytes,18,opt,name=artifactRepositoryRef"` + + // ArtifactGCStatus maintains the status of Artifact Garbage Collection + ArtifactGCStatus *ArtGCStatus `json:"artifactGCStatus,omitempty" protobuf:"bytes,19,opt,name=artifactGCStatus"` } func (ws *WorkflowStatus) IsOffloadNodeStatus() bool { @@ -1587,6 +1850,14 @@ func (ws *WorkflowStatus) GetOffloadNodeStatusVersion() string { return ws.OffloadNodeStatusVersion } +func (ws *WorkflowStatus) GetStoredTemplates() []Template { + var out []Template + for _, t := range ws.StoredTemplates { + out = append(out, t) + } + return out +} + func (wf *Workflow) GetOffloadNodeStatusVersion() string { return wf.Status.GetOffloadNodeStatusVersion() } @@ -1621,7 +1892,8 @@ type RetryAffinity struct { // RetryStrategy provides controls on how to retry a workflow step type RetryStrategy struct { - // Limit is the maximum number of attempts when retrying a container + // Limit is the maximum number of retry attempts when retrying a container. It does not include the original + // container; the maximum number of total attempts will be `limit + 1`. 
Limit *intstr.IntOrString `json:"limit,omitempty" protobuf:"varint,1,opt,name=limit"` // RetryPolicy is a policy of NodePhase statuses that will be retried @@ -1760,6 +2032,8 @@ const ( ConditionTypeSpecError ConditionType = "SpecError" // ConditionTypeMetricsError is an error during metric emission ConditionTypeMetricsError ConditionType = "MetricsError" + //ConditionTypeArtifactGCError is an error on artifact garbage collection + ConditionTypeArtifactGCError ConditionType = "ArtifactGCError" ) type Condition struct { @@ -1863,6 +2137,18 @@ type NodeStatus struct { SynchronizationStatus *NodeSynchronizationStatus `json:"synchronizationStatus,omitempty" protobuf:"bytes,25,opt,name=synchronizationStatus"` } +func (n *NodeStatus) GetName() string { + return n.Name +} + +func (n *NodeStatus) IsDAGTask() bool { + return false +} + +func (n *NodeStatus) IsWorkflowStep() bool { + return false +} + // Fulfilled returns whether a phase is fulfilled, i.e. it completed execution or was skipped or omitted func (phase NodePhase) Fulfilled() bool { return phase.Completed() || phase == NodeSkipped || phase == NodeOmitted @@ -1993,6 +2279,13 @@ func (n *NodeStatus) GetTemplateRef() *TemplateRef { return n.TemplateRef } +func (n *NodeStatus) GetOutputs() *Outputs { + if n == nil { + return nil + } + return n.Outputs +} + // IsActiveSuspendNode returns whether this node is an active suspend node func (n *NodeStatus) IsActiveSuspendNode() bool { return n.Type == NodeTypeSuspend && n.Phase == NodeRunning @@ -2117,6 +2410,12 @@ type GitArtifact struct { // DisableSubmodules disables submodules during git clone DisableSubmodules bool `json:"disableSubmodules,omitempty" protobuf:"varint,9,opt,name=disableSubmodules"` + + // SingleBranch enables single branch clone, using the `branch` parameter + SingleBranch bool `json:"singleBranch,omitempty" protobuf:"varint,10,opt,name=singleBranch"` + + // Branch is the branch to fetch when `SingleBranch` is enabled + Branch string 
`json:"branch,omitempty" protobuf:"bytes,11,opt,name=branch"` } func (g *GitArtifact) HasLocation() bool { @@ -2154,9 +2453,9 @@ type ArtifactoryArtifact struct { ArtifactoryAuth `json:",inline" protobuf:"bytes,2,opt,name=artifactoryAuth"` } -//func (a *ArtifactoryArtifact) String() string { -// return a.URL -//} +// func (a *ArtifactoryArtifact) String() string { +// return a.URL +// } func (a *ArtifactoryArtifact) GetKey() (string, error) { u, err := url.Parse(a.URL) if err != nil { @@ -2179,6 +2478,42 @@ func (a *ArtifactoryArtifact) HasLocation() bool { return a != nil && a.URL != "" && a.UsernameSecret != nil } +// AzureBlobContainer contains the access information for interfacing with an Azure Blob Storage container +type AzureBlobContainer struct { + // Endpoint is the service url associated with an account. It is most likely "https://.blob.core.windows.net" + Endpoint string `json:"endpoint" protobuf:"bytes,1,opt,name=endpoint"` + + // Container is the container where resources will be stored + Container string `json:"container" protobuf:"bytes,2,opt,name=container"` + + // AccountKeySecret is the secret selector to the Azure Blob Storage account access key + AccountKeySecret *apiv1.SecretKeySelector `json:"accountKeySecret,omitempty" protobuf:"bytes,3,opt,name=accountKeySecret"` + + // UseSDKCreds tells the driver to figure out credentials based on sdk defaults. 
+ UseSDKCreds bool `json:"useSDKCreds,omitempty" protobuf:"varint,4,opt,name=useSDKCreds"` +} + +// AzureArtifact is the location of a an Azure Storage artifact +type AzureArtifact struct { + AzureBlobContainer `json:",inline" protobuf:"bytes,1,opt,name=azureBlobContainer"` + + // Blob is the blob name (i.e., path) in the container where the artifact resides + Blob string `json:"blob" protobuf:"bytes,2,opt,name=blob"` +} + +func (a *AzureArtifact) GetKey() (string, error) { + return a.Blob, nil +} + +func (a *AzureArtifact) SetKey(key string) error { + a.Blob = key + return nil +} + +func (a *AzureArtifact) HasLocation() bool { + return a != nil && a.Container != "" && a.Blob != "" +} + // HDFSArtifact is the location of an HDFS artifact type HDFSArtifact struct { HDFSConfig `json:",inline" protobuf:"bytes,1,opt,name=hDFSConfig"` @@ -2269,13 +2604,55 @@ type Header struct { Value string `json:"value" protobuf:"bytes,2,opt,name=value"` } -// HTTPArtifact allows an file served on HTTP to be placed as an input artifact in a container +// BasicAuth describes the secret selectors required for basic authentication +type BasicAuth struct { + // UsernameSecret is the secret selector to the repository username + UsernameSecret *apiv1.SecretKeySelector `json:"usernameSecret,omitempty" protobuf:"bytes,1,opt,name=usernameSecret"` + + // PasswordSecret is the secret selector to the repository password + PasswordSecret *apiv1.SecretKeySelector `json:"passwordSecret,omitempty" protobuf:"bytes,2,opt,name=passwordSecret"` +} + +// ClientCertAuth holds necessary information for client authentication via certificates +type ClientCertAuth struct { + ClientCertSecret *apiv1.SecretKeySelector `json:"clientCertSecret,omitempty" protobuf:"bytes,1,opt,name=clientCertSecret"` + ClientKeySecret *apiv1.SecretKeySelector `json:"clientKeySecret,omitempty" protobuf:"bytes,2,opt,name=clientKeySecret"` +} + +// OAuth2Auth holds all information for client authentication via OAuth2 tokens +type 
OAuth2Auth struct { + ClientIDSecret *apiv1.SecretKeySelector `json:"clientIDSecret,omitempty" protobuf:"bytes,1,opt,name=clientIDSecret"` + ClientSecretSecret *apiv1.SecretKeySelector `json:"clientSecretSecret,omitempty" protobuf:"bytes,2,opt,name=clientSecretSecret"` + TokenURLSecret *apiv1.SecretKeySelector `json:"tokenURLSecret,omitempty" protobuf:"bytes,3,opt,name=tokenURLSecret"` + Scopes []string `json:"scopes,omitempty" protobuf:"bytes,5,rep,name=scopes"` + EndpointParams []OAuth2EndpointParam `json:"endpointParams,omitempty" protobuf:"bytes,6,rep,name=endpointParams"` +} + +// EndpointParam is for requesting optional fields that should be sent in the oauth request +type OAuth2EndpointParam struct { + // Name is the header name + Key string `json:"key" protobuf:"bytes,1,opt,name=key"` + + // Value is the literal value to use for the header + Value string `json:"value,omitempty" protobuf:"bytes,2,opt,name=value"` +} + +type HTTPAuth struct { + ClientCert ClientCertAuth `json:"clientCert,omitempty" protobuf:"bytes,1,opt,name=clientCert"` + OAuth2 OAuth2Auth `json:"oauth2,omitempty" protobuf:"bytes,2,opt,name=oauth2"` + BasicAuth BasicAuth `json:"basicAuth,omitempty" protobuf:"bytes,3,opt,name=basicAuth"` +} + +// HTTPArtifact allows a file served on HTTP to be placed as an input artifact in a container type HTTPArtifact struct { // URL of the artifact URL string `json:"url" protobuf:"bytes,1,opt,name=url"` // Headers are an optional list of headers to send with HTTP requests for artifacts - Headers []Header `json:"headers,omitempty" protobuf:"bytes,2,opt,name=headers"` + Headers []Header `json:"headers,omitempty" protobuf:"bytes,2,rep,name=headers"` + + // Auth contains information for client authentication + Auth *HTTPAuth `json:"auth,omitempty" protobuf:"bytes,3,opt,name=auth"` } func (h *HTTPArtifact) GetKey() (string, error) { @@ -2411,6 +2788,9 @@ type ResourceTemplate struct { // Manifest contains the kubernetes manifest Manifest string 
`json:"manifest,omitempty" protobuf:"bytes,3,opt,name=manifest"` + // ManifestFrom is the source for a single kubernetes manifest + ManifestFrom *ManifestFrom `json:"manifestFrom,omitempty" protobuf:"bytes,8,opt,name=manifestFrom"` + // SetOwnerReference sets the reference to the workflow on the OwnerReference of generated resource. SetOwnerReference bool `json:"setOwnerReference,omitempty" protobuf:"varint,4,opt,name=setOwnerReference"` @@ -2430,6 +2810,11 @@ type ResourceTemplate struct { Flags []string `json:"flags,omitempty" protobuf:"varint,7,opt,name=flags"` } +type ManifestFrom struct { + // Artifact contains the artifact to use + Artifact *Artifact `json:"artifact" protobuf:"bytes,1,opt,name=artifact"` +} + // GetType returns the type of this template func (tmpl *Template) GetType() TemplateType { if tmpl.Container != nil { @@ -2549,7 +2934,7 @@ func (t *Template) IsDaemon() bool { // if logs should be saved as an artifact func (tmpl *Template) SaveLogsAsArtifact() bool { - return tmpl != nil && tmpl.ArchiveLocation.IsArchiveLogs() && (tmpl.ContainerSet == nil || tmpl.ContainerSet.HasContainerNamed("main")) + return tmpl != nil && tmpl.ArchiveLocation.IsArchiveLogs() } func (t *Template) GetRetryStrategy() (wait.Backoff, error) { @@ -2630,6 +3015,18 @@ type DAGTask struct { Hooks LifecycleHooks `json:"hooks,omitempty" protobuf:"bytes,13,opt,name=hooks"` } +func (t *DAGTask) GetName() string { + return t.Name +} + +func (t *DAGTask) IsDAGTask() bool { + return true +} + +func (t *DAGTask) IsWorkflowStep() bool { + return false +} + var _ TemplateReferenceHolder = &DAGTask{} func (t *DAGTask) GetExitHook(args Arguments) *LifecycleHook { @@ -2736,14 +3133,14 @@ func (out *Outputs) HasParameters() bool { return out != nil && len(out.Parameters) > 0 } -const MainLogsArtifactName = "main-logs" +const LogsSuffix = "-logs" func (out *Outputs) HasLogs() bool { if out == nil { return false } for _, a := range out.Artifacts { - if a.Name == MainLogsArtifactName { + if 
strings.HasSuffix(a.Name, LogsSuffix) { return true } } @@ -2794,6 +3191,13 @@ func (wf *Workflow) GetTemplateByName(name string) *Template { return nil } +func (w *Workflow) GetTemplates() []Template { + return append( + w.GetExecSpec().Templates, + w.Status.GetStoredTemplates()..., + ) +} + func (wf *Workflow) GetNodeByName(nodeName string) *NodeStatus { nodeID := wf.NodeID(nodeName) node, ok := wf.Status.Nodes[nodeID] diff --git a/pkg/apis/workflow/v1alpha1/workflow_types_test.go b/pkg/apis/workflow/v1alpha1/workflow_types_test.go index 43d68c45101b..d4881edba48c 100644 --- a/pkg/apis/workflow/v1alpha1/workflow_types_test.go +++ b/pkg/apis/workflow/v1alpha1/workflow_types_test.go @@ -1,6 +1,7 @@ package v1alpha1 import ( + "fmt" "sort" "testing" "time" @@ -116,6 +117,175 @@ func TestWorkflowHappenedBetween(t *testing.T) { })) } +func TestWorkflowHasArtifactGC(t *testing.T) { + tests := []struct { + name string + workflowArtGCStrategySpec string + artifactGCStrategySpec string + expectedResult bool + }{ + { + name: "WorkflowSpecGC_Completion", + workflowArtGCStrategySpec: ` + artifactGC: + strategy: OnWorkflowCompletion`, + artifactGCStrategySpec: "", + expectedResult: true, + }, + { + name: "ArtifactSpecGC_Completion", + workflowArtGCStrategySpec: "", + artifactGCStrategySpec: ` + artifactGC: + strategy: OnWorkflowCompletion`, + expectedResult: true, + }, + { + name: "WorkflowSpecGC_Deletion", + workflowArtGCStrategySpec: ` + artifactGC: + strategy: OnWorkflowDeletion`, + artifactGCStrategySpec: "", + expectedResult: true, + }, + { + name: "ArtifactSpecGC_Deletion", + workflowArtGCStrategySpec: "", + artifactGCStrategySpec: ` + artifactGC: + strategy: OnWorkflowDeletion`, + expectedResult: true, + }, + { + name: "NoGC", + workflowArtGCStrategySpec: "", + artifactGCStrategySpec: "", + expectedResult: false, + }, + { + name: "WorkflowSpecGCNone", + workflowArtGCStrategySpec: ` + artifactGC: + strategy: ""`, + artifactGCStrategySpec: "", + expectedResult: false, + 
}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + + workflowSpec := fmt.Sprintf(` + apiVersion: argoproj.io/v1alpha1 + kind: Workflow + metadata: + generateName: artifact-passing- + spec: + entrypoint: whalesay + %s + templates: + - name: whalesay + container: + image: docker/whalesay:latest + command: [sh, -c] + args: ["sleep 1; cowsay hello world | tee /tmp/hello_world.txt"] + outputs: + artifacts: + - name: out + path: /out + s3: + key: out + %s`, tt.workflowArtGCStrategySpec, tt.artifactGCStrategySpec) + + wf := MustUnmarshalWorkflow(workflowSpec) + + hasArtifact := wf.HasArtifactGC() + + assert.Equal(t, hasArtifact, tt.expectedResult) + }) + } + +} + +func TestWorkflowGetArtifactGCStrategy(t *testing.T) { + tests := []struct { + name string + workflowArtGCStrategySpec string + artifactGCStrategySpec string + expectedStrategy ArtifactGCStrategy + }{ + { + name: "WorkflowLevel", + workflowArtGCStrategySpec: ` + artifactGC: + strategy: OnWorkflowCompletion`, + artifactGCStrategySpec: "", + expectedStrategy: ArtifactGCOnWorkflowCompletion, + }, + { + name: "ArtifactOverride", + workflowArtGCStrategySpec: ` + artifactGC: + strategy: OnWorkflowCompletion`, + artifactGCStrategySpec: ` + artifactGC: + strategy: Never`, + expectedStrategy: ArtifactGCNever, + }, + { + name: "NotDefined", + workflowArtGCStrategySpec: ` + artifactGC:`, + artifactGCStrategySpec: ` + artifactGC:`, + expectedStrategy: ArtifactGCNever, + }, + { + name: "NotDefined2", + workflowArtGCStrategySpec: ` + artifactGC: + strategy: ""`, + artifactGCStrategySpec: ` + artifactGC: + strategy: ""`, + expectedStrategy: ArtifactGCNever, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + + workflowSpec := fmt.Sprintf(` + apiVersion: argoproj.io/v1alpha1 + kind: Workflow + metadata: + generateName: artifact-passing- + spec: + entrypoint: whalesay + %s + templates: + - name: whalesay + container: + image: docker/whalesay:latest + command: [sh, -c] + 
args: ["sleep 1; cowsay hello world | tee /tmp/hello_world.txt"] + outputs: + artifacts: + - name: out + path: /out + s3: + key: out + %s`, tt.workflowArtGCStrategySpec, tt.artifactGCStrategySpec) + + wf := MustUnmarshalWorkflow(workflowSpec) + a := wf.Spec.Templates[0].Outputs.Artifacts[0] + gcStrategy := wf.GetArtifactGCStrategy(&a) + assert.Equal(t, tt.expectedStrategy, gcStrategy) + }) + } + +} + func TestArtifact_ValidatePath(t *testing.T) { t.Run("empty path fails", func(t *testing.T) { a1 := Artifact{Name: "a1", Path: ""} @@ -247,6 +417,15 @@ func TestArtifactoryArtifact(t *testing.T) { assert.Equal(t, "/my-key", key, "has leading slash") } +func TestAzureArtifact(t *testing.T) { + a := &AzureArtifact{Blob: "my-blob", AzureBlobContainer: AzureBlobContainer{Container: "my-container"}} + assert.True(t, a.HasLocation()) + assert.NoError(t, a.SetKey("my-blob")) + key, err := a.GetKey() + assert.NoError(t, err) + assert.Equal(t, "my-blob", key) +} + func TestGitArtifact(t *testing.T) { a := &GitArtifact{Repo: "my-repo"} assert.True(t, a.HasLocation()) @@ -346,6 +525,9 @@ func TestArtifactLocation_Get(t *testing.T) { assert.Nil(t, v) assert.EqualError(t, err, "You need to configure artifact storage. 
More information on how to do this can be found in the docs: https://argoproj.github.io/argo-workflows/configure-artifact-repository/") + v, _ = (&ArtifactLocation{Azure: &AzureArtifact{}}).Get() + assert.IsType(t, &AzureArtifact{}, v) + v, _ = (&ArtifactLocation{Git: &GitArtifact{}}).Get() assert.IsType(t, &GitArtifact{}, v) @@ -378,6 +560,11 @@ func TestArtifactLocation_SetType(t *testing.T) { assert.NoError(t, l.SetType(&ArtifactoryArtifact{})) assert.NotNil(t, l.Artifactory) }) + t.Run("Azure", func(t *testing.T) { + l := &ArtifactLocation{} + assert.NoError(t, l.SetType(&AzureArtifact{})) + assert.NotNil(t, l.Azure) + }) t.Run("GCS", func(t *testing.T) { l := &ArtifactLocation{} assert.NoError(t, l.SetType(&GCSArtifact{})) @@ -408,6 +595,11 @@ func TestArtifactLocation_SetType(t *testing.T) { assert.NoError(t, l.SetType(&S3Artifact{})) assert.NotNil(t, l.S3) }) + t.Run("Azure", func(t *testing.T) { + l := &ArtifactLocation{} + assert.NoError(t, l.SetType(&AzureArtifact{})) + assert.NotNil(t, l.Azure) + }) } func TestArtifactLocation_Key(t *testing.T) { @@ -434,6 +626,12 @@ func TestArtifactLocation_Key(t *testing.T) { assert.NoError(t, err) assert.Equal(t, "http://my-host/my-dir/my-file?a=1", l.Artifactory.URL, "appends to Artifactory path") }) + t.Run("Azure", func(t *testing.T) { + l := &ArtifactLocation{Azure: &AzureArtifact{Blob: "my-dir"}} + err := l.AppendToKey("my-file") + assert.NoError(t, err) + assert.Equal(t, "my-dir/my-file", l.Azure.Blob, "appends to Azure Blob name") + }) t.Run("Git", func(t *testing.T) { l := &ArtifactLocation{Git: &GitArtifact{}} assert.False(t, l.HasKey()) @@ -516,6 +714,21 @@ func TestArtifact_GetArchive(t *testing.T) { assert.Equal(t, &ArchiveStrategy{None: &NoneStrategy{}}, (&Artifact{Archive: &ArchiveStrategy{None: &NoneStrategy{}}}).GetArchive()) } +func TestArtifactGC_GetStrategy(t *testing.T) { + t.Run("Nil", func(t *testing.T) { + var artifactGC *ArtifactGC + assert.Equal(t, ArtifactGCStrategyUndefined, 
artifactGC.GetStrategy()) + }) + t.Run("Unspecified", func(t *testing.T) { + var artifactGC = &ArtifactGC{} + assert.Equal(t, ArtifactGCStrategyUndefined, artifactGC.GetStrategy()) + }) + t.Run("Specified", func(t *testing.T) { + var artifactGC = &ArtifactGC{Strategy: ArtifactGCOnWorkflowCompletion} + assert.Equal(t, ArtifactGCOnWorkflowCompletion, artifactGC.GetStrategy()) + }) +} + func TestPodGCStrategy_IsValid(t *testing.T) { for _, s := range []PodGCStrategy{ PodGCOnPodNone, @@ -753,6 +966,168 @@ func TestPrometheus_GetDescIsStable(t *testing.T) { } } +func TestWorkflow_SearchArtifacts(t *testing.T) { + wf := Workflow{ + ObjectMeta: metav1.ObjectMeta{ + Name: "test", + Namespace: "test", + }, + Spec: WorkflowSpec{ + ArtifactGC: &ArtifactGC{ + Strategy: ArtifactGCOnWorkflowCompletion, + }, + Templates: []Template{ + { + Name: "template-foo", + Outputs: Outputs{ + Artifacts: Artifacts{ + Artifact{Name: "artifact-foo"}, + Artifact{Name: "artifact-bar", ArtifactGC: &ArtifactGC{Strategy: ArtifactGCOnWorkflowDeletion}}, + }, + }, + }, + { + Name: "template-bar", + Outputs: Outputs{ + Artifacts: Artifacts{ + Artifact{Name: "artifact-foobar"}, + }, + }, + }, + }, + }, + Status: WorkflowStatus{ + Nodes: Nodes{ + "test-foo": NodeStatus{ + ID: "node-foo", + TemplateName: "template-foo", + Outputs: &Outputs{ + Artifacts: Artifacts{ + Artifact{Name: "artifact-foo"}, + Artifact{Name: "artifact-bar", ArtifactGC: &ArtifactGC{Strategy: ArtifactGCOnWorkflowDeletion}}, + }, + }, + }, + "test-bar": NodeStatus{ + ID: "node-bar", + TemplateName: "template-bar", + Outputs: &Outputs{ + Artifacts: Artifacts{ + Artifact{Name: "artifact-foobar"}, + }, + }, + }, + }, + }, + } + + query := NewArtifactSearchQuery() + + countArtifactName := func(ars ArtifactSearchResults, name string) int { + count := 0 + for _, ar := range ars { + if ar.Artifact.Name == name { + count++ + } + } + return count + } + countNodeID := func(ars ArtifactSearchResults, nodeID string) int { + count := 0 + for _, ar 
:= range ars { + if ar.NodeID == nodeID { + count++ + } + } + return count + } + + // no filters + queriedArtifactSearchResults := wf.SearchArtifacts(query) + assert.NotNil(t, queriedArtifactSearchResults) + assert.Len(t, queriedArtifactSearchResults, 3) + assert.Equal(t, 1, countArtifactName(queriedArtifactSearchResults, "artifact-foo")) + assert.Equal(t, 1, countArtifactName(queriedArtifactSearchResults, "artifact-bar")) + assert.Equal(t, 1, countArtifactName(queriedArtifactSearchResults, "artifact-foobar")) + assert.Equal(t, 2, countNodeID(queriedArtifactSearchResults, "node-foo")) + assert.Equal(t, 1, countNodeID(queriedArtifactSearchResults, "node-bar")) + + // artifactGC strategy: OnWorkflowCompletion + query.ArtifactGCStrategies[ArtifactGCOnWorkflowCompletion] = true + queriedArtifactSearchResults = wf.SearchArtifacts(query) + assert.NotNil(t, queriedArtifactSearchResults) + assert.Len(t, queriedArtifactSearchResults, 2) + assert.Equal(t, 1, countArtifactName(queriedArtifactSearchResults, "artifact-foo")) + assert.Equal(t, 0, countArtifactName(queriedArtifactSearchResults, "artifact-bar")) + assert.Equal(t, 1, countArtifactName(queriedArtifactSearchResults, "artifact-foobar")) + assert.Equal(t, 1, countNodeID(queriedArtifactSearchResults, "node-foo")) + assert.Equal(t, 1, countNodeID(queriedArtifactSearchResults, "node-bar")) + + // artifactGC strategy: OnWorkflowDeletion + query = NewArtifactSearchQuery() + query.ArtifactGCStrategies[ArtifactGCOnWorkflowDeletion] = true + queriedArtifactSearchResults = wf.SearchArtifacts(query) + assert.NotNil(t, queriedArtifactSearchResults) + assert.Len(t, queriedArtifactSearchResults, 1) + assert.Equal(t, 0, countArtifactName(queriedArtifactSearchResults, "artifact-foo")) + assert.Equal(t, 1, countArtifactName(queriedArtifactSearchResults, "artifact-bar")) + assert.Equal(t, 0, countArtifactName(queriedArtifactSearchResults, "artifact-foobar")) + assert.Equal(t, 1, countNodeID(queriedArtifactSearchResults, "node-foo")) + 
assert.Equal(t, 0, countNodeID(queriedArtifactSearchResults, "node-bar")) + + // template name + query = NewArtifactSearchQuery() + query.TemplateName = "template-bar" + queriedArtifactSearchResults = wf.SearchArtifacts(query) + assert.NotNil(t, queriedArtifactSearchResults) + assert.Len(t, queriedArtifactSearchResults, 1) + assert.Equal(t, "artifact-foobar", queriedArtifactSearchResults[0].Artifact.Name) + assert.Equal(t, "node-bar", queriedArtifactSearchResults[0].NodeID) + + // artifact name + query = NewArtifactSearchQuery() + query.ArtifactName = "artifact-foo" + queriedArtifactSearchResults = wf.SearchArtifacts(query) + assert.NotNil(t, queriedArtifactSearchResults) + assert.Len(t, queriedArtifactSearchResults, 1) + assert.Equal(t, "artifact-foo", queriedArtifactSearchResults[0].Artifact.Name) + assert.Equal(t, "node-foo", queriedArtifactSearchResults[0].NodeID) + + // node id + query = NewArtifactSearchQuery() + query.NodeId = "node-foo" + queriedArtifactSearchResults = wf.SearchArtifacts(query) + assert.NotNil(t, queriedArtifactSearchResults) + assert.Len(t, queriedArtifactSearchResults, 2) + assert.Equal(t, 1, countArtifactName(queriedArtifactSearchResults, "artifact-foo")) + assert.Equal(t, 1, countArtifactName(queriedArtifactSearchResults, "artifact-bar")) + assert.Equal(t, 2, countNodeID(queriedArtifactSearchResults, "node-foo")) + + // bad query + query = NewArtifactSearchQuery() + query.NodeId = "node-foobar" + queriedArtifactSearchResults = wf.SearchArtifacts(query) + assert.Nil(t, queriedArtifactSearchResults) + assert.Len(t, queriedArtifactSearchResults, 0) + + // template and artifact name + query = NewArtifactSearchQuery() + query.TemplateName = "template-foo" + query.ArtifactName = "artifact-foo" + queriedArtifactSearchResults = wf.SearchArtifacts(query) + assert.NotNil(t, queriedArtifactSearchResults) + assert.Len(t, queriedArtifactSearchResults, 1) + assert.Equal(t, "artifact-foo", queriedArtifactSearchResults[0].Artifact.Name) + 
assert.Equal(t, "node-foo", queriedArtifactSearchResults[0].NodeID) +} + +func TestWorkflowSpec_GetArtifactGC(t *testing.T) { + spec := WorkflowSpec{} + + assert.NotNil(t, spec.GetArtifactGC()) + assert.Equal(t, &ArtifactGC{Strategy: ArtifactGCStrategyUndefined}, spec.GetArtifactGC()) +} + func TestWorkflowSpec_GetVolumeGC(t *testing.T) { spec := WorkflowSpec{} @@ -945,16 +1320,6 @@ func TestTemplate_SaveLogsAsArtifact(t *testing.T) { x := &Template{ArchiveLocation: &ArtifactLocation{ArchiveLogs: pointer.BoolPtr(true)}} assert.True(t, x.SaveLogsAsArtifact()) }) - t.Run("ContainerSet", func(t *testing.T) { - t.Run("NoMain", func(t *testing.T) { - x := &Template{ArchiveLocation: &ArtifactLocation{ArchiveLogs: pointer.BoolPtr(true)}, ContainerSet: &ContainerSetTemplate{}} - assert.False(t, x.SaveLogsAsArtifact()) - }) - t.Run("Main", func(t *testing.T) { - x := &Template{ArchiveLocation: &ArtifactLocation{ArchiveLogs: pointer.BoolPtr(true)}, ContainerSet: &ContainerSetTemplate{Containers: []ContainerNode{{Container: corev1.Container{Name: "main"}}}}} - assert.True(t, x.SaveLogsAsArtifact()) - }) - }) } func TestTemplate_ExcludeTemplateTypes(t *testing.T) { diff --git a/pkg/apis/workflow/v1alpha1/zz_generated.deepcopy.go b/pkg/apis/workflow/v1alpha1/zz_generated.deepcopy.go index 0609523dd4dc..d9603259d358 100644 --- a/pkg/apis/workflow/v1alpha1/zz_generated.deepcopy.go +++ b/pkg/apis/workflow/v1alpha1/zz_generated.deepcopy.go @@ -92,6 +92,36 @@ func (in *Arguments) DeepCopy() *Arguments { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *ArtGCStatus) DeepCopyInto(out *ArtGCStatus) { + *out = *in + if in.StrategiesProcessed != nil { + in, out := &in.StrategiesProcessed, &out.StrategiesProcessed + *out = make(map[ArtifactGCStrategy]bool, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + if in.PodsRecouped != nil { + in, out := &in.PodsRecouped, &out.PodsRecouped + *out = make(map[string]bool, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ArtGCStatus. +func (in *ArtGCStatus) DeepCopy() *ArtGCStatus { + if in == nil { + return nil + } + out := new(ArtGCStatus) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *Artifact) DeepCopyInto(out *Artifact) { *out = *in @@ -106,6 +136,11 @@ func (in *Artifact) DeepCopyInto(out *Artifact) { *out = new(ArchiveStrategy) (*in).DeepCopyInto(*out) } + if in.ArtifactGC != nil { + in, out := &in.ArtifactGC, &out.ArtifactGC + *out = new(ArtifactGC) + (*in).DeepCopyInto(*out) + } return } @@ -119,6 +154,73 @@ func (in *Artifact) DeepCopy() *Artifact { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ArtifactGC) DeepCopyInto(out *ArtifactGC) { + *out = *in + if in.PodMetadata != nil { + in, out := &in.PodMetadata, &out.PodMetadata + *out = new(Metadata) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ArtifactGC. +func (in *ArtifactGC) DeepCopy() *ArtifactGC { + if in == nil { + return nil + } + out := new(ArtifactGC) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *ArtifactGCSpec) DeepCopyInto(out *ArtifactGCSpec) { + *out = *in + if in.ArtifactsByNode != nil { + in, out := &in.ArtifactsByNode, &out.ArtifactsByNode + *out = make(map[string]ArtifactNodeSpec, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ArtifactGCSpec. +func (in *ArtifactGCSpec) DeepCopy() *ArtifactGCSpec { + if in == nil { + return nil + } + out := new(ArtifactGCSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ArtifactGCStatus) DeepCopyInto(out *ArtifactGCStatus) { + *out = *in + if in.ArtifactResultsByNode != nil { + in, out := &in.ArtifactResultsByNode, &out.ArtifactResultsByNode + *out = make(map[string]ArtifactResultNodeStatus, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ArtifactGCStatus. +func (in *ArtifactGCStatus) DeepCopy() *ArtifactGCStatus { + if in == nil { + return nil + } + out := new(ArtifactGCStatus) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *ArtifactLocation) DeepCopyInto(out *ArtifactLocation) { *out = *in @@ -167,6 +269,11 @@ func (in *ArtifactLocation) DeepCopyInto(out *ArtifactLocation) { *out = new(GCSArtifact) (*in).DeepCopyInto(*out) } + if in.Azure != nil { + in, out := &in.Azure, &out.Azure + *out = new(AzureArtifact) + (*in).DeepCopyInto(*out) + } return } @@ -180,6 +287,34 @@ func (in *ArtifactLocation) DeepCopy() *ArtifactLocation { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *ArtifactNodeSpec) DeepCopyInto(out *ArtifactNodeSpec) { + *out = *in + if in.ArchiveLocation != nil { + in, out := &in.ArchiveLocation, &out.ArchiveLocation + *out = new(ArtifactLocation) + (*in).DeepCopyInto(*out) + } + if in.Artifacts != nil { + in, out := &in.Artifacts, &out.Artifacts + *out = make(map[string]Artifact, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ArtifactNodeSpec. +func (in *ArtifactNodeSpec) DeepCopy() *ArtifactNodeSpec { + if in == nil { + return nil + } + out := new(ArtifactNodeSpec) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *ArtifactPaths) DeepCopyInto(out *ArtifactPaths) { *out = *in @@ -230,6 +365,11 @@ func (in *ArtifactRepository) DeepCopyInto(out *ArtifactRepository) { *out = new(GCSArtifactRepository) (*in).DeepCopyInto(*out) } + if in.Azure != nil { + in, out := &in.Azure, &out.Azure + *out = new(AzureArtifactRepository) + (*in).DeepCopyInto(*out) + } return } @@ -281,6 +421,124 @@ func (in *ArtifactRepositoryRefStatus) DeepCopy() *ArtifactRepositoryRefStatus { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ArtifactResult) DeepCopyInto(out *ArtifactResult) { + *out = *in + if in.Error != nil { + in, out := &in.Error, &out.Error + *out = new(string) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ArtifactResult. +func (in *ArtifactResult) DeepCopy() *ArtifactResult { + if in == nil { + return nil + } + out := new(ArtifactResult) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *ArtifactResultNodeStatus) DeepCopyInto(out *ArtifactResultNodeStatus) { + *out = *in + if in.ArtifactResults != nil { + in, out := &in.ArtifactResults, &out.ArtifactResults + *out = make(map[string]ArtifactResult, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ArtifactResultNodeStatus. +func (in *ArtifactResultNodeStatus) DeepCopy() *ArtifactResultNodeStatus { + if in == nil { + return nil + } + out := new(ArtifactResultNodeStatus) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ArtifactSearchQuery) DeepCopyInto(out *ArtifactSearchQuery) { + *out = *in + if in.ArtifactGCStrategies != nil { + in, out := &in.ArtifactGCStrategies, &out.ArtifactGCStrategies + *out = make(map[ArtifactGCStrategy]bool, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + if in.Deleted != nil { + in, out := &in.Deleted, &out.Deleted + *out = new(bool) + **out = **in + } + if in.NodeTypes != nil { + in, out := &in.NodeTypes, &out.NodeTypes + *out = make(map[NodeType]bool, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ArtifactSearchQuery. +func (in *ArtifactSearchQuery) DeepCopy() *ArtifactSearchQuery { + if in == nil { + return nil + } + out := new(ArtifactSearchQuery) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ArtifactSearchResult) DeepCopyInto(out *ArtifactSearchResult) { + *out = *in + in.Artifact.DeepCopyInto(&out.Artifact) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ArtifactSearchResult. 
+func (in *ArtifactSearchResult) DeepCopy() *ArtifactSearchResult { + if in == nil { + return nil + } + out := new(ArtifactSearchResult) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in ArtifactSearchResults) DeepCopyInto(out *ArtifactSearchResults) { + { + in := &in + *out = make(ArtifactSearchResults, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + return + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ArtifactSearchResults. +func (in ArtifactSearchResults) DeepCopy() ArtifactSearchResults { + if in == nil { + return nil + } + out := new(ArtifactSearchResults) + in.DeepCopyInto(out) + return *out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *ArtifactoryArtifact) DeepCopyInto(out *ArtifactoryArtifact) { *out = *in @@ -363,6 +621,61 @@ func (in Artifacts) DeepCopy() Artifacts { return *out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *AzureArtifact) DeepCopyInto(out *AzureArtifact) { + *out = *in + in.AzureBlobContainer.DeepCopyInto(&out.AzureBlobContainer) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AzureArtifact. +func (in *AzureArtifact) DeepCopy() *AzureArtifact { + if in == nil { + return nil + } + out := new(AzureArtifact) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *AzureArtifactRepository) DeepCopyInto(out *AzureArtifactRepository) { + *out = *in + in.AzureBlobContainer.DeepCopyInto(&out.AzureBlobContainer) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AzureArtifactRepository. +func (in *AzureArtifactRepository) DeepCopy() *AzureArtifactRepository { + if in == nil { + return nil + } + out := new(AzureArtifactRepository) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *AzureBlobContainer) DeepCopyInto(out *AzureBlobContainer) { + *out = *in + if in.AccountKeySecret != nil { + in, out := &in.AccountKeySecret, &out.AccountKeySecret + *out = new(v1.SecretKeySelector) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AzureBlobContainer. +func (in *AzureBlobContainer) DeepCopy() *AzureBlobContainer { + if in == nil { + return nil + } + out := new(AzureBlobContainer) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *Backoff) DeepCopyInto(out *Backoff) { *out = *in @@ -384,6 +697,32 @@ func (in *Backoff) DeepCopy() *Backoff { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BasicAuth) DeepCopyInto(out *BasicAuth) { + *out = *in + if in.UsernameSecret != nil { + in, out := &in.UsernameSecret, &out.UsernameSecret + *out = new(v1.SecretKeySelector) + (*in).DeepCopyInto(*out) + } + if in.PasswordSecret != nil { + in, out := &in.PasswordSecret, &out.PasswordSecret + *out = new(v1.SecretKeySelector) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BasicAuth. 
+func (in *BasicAuth) DeepCopy() *BasicAuth { + if in == nil { + return nil + } + out := new(BasicAuth) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *Cache) DeepCopyInto(out *Cache) { *out = *in @@ -405,6 +744,32 @@ func (in *Cache) DeepCopy() *Cache { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ClientCertAuth) DeepCopyInto(out *ClientCertAuth) { + *out = *in + if in.ClientCertSecret != nil { + in, out := &in.ClientCertSecret, &out.ClientCertSecret + *out = new(v1.SecretKeySelector) + (*in).DeepCopyInto(*out) + } + if in.ClientKeySecret != nil { + in, out := &in.ClientKeySecret, &out.ClientKeySecret + *out = new(v1.SecretKeySelector) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClientCertAuth. +func (in *ClientCertAuth) DeepCopy() *ClientCertAuth { + if in == nil { + return nil + } + out := new(ClientCertAuth) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *ClusterWorkflowTemplate) DeepCopyInto(out *ClusterWorkflowTemplate) { *out = *in @@ -1155,6 +1520,11 @@ func (in *HTTP) DeepCopyInto(out *HTTP) { *out = new(int64) **out = **in } + if in.BodyFrom != nil { + in, out := &in.BodyFrom, &out.BodyFrom + *out = new(HTTPBodySource) + (*in).DeepCopyInto(*out) + } return } @@ -1176,6 +1546,11 @@ func (in *HTTPArtifact) DeepCopyInto(out *HTTPArtifact) { *out = make([]Header, len(*in)) copy(*out, *in) } + if in.Auth != nil { + in, out := &in.Auth, &out.Auth + *out = new(HTTPAuth) + (*in).DeepCopyInto(*out) + } return } @@ -1189,6 +1564,46 @@ func (in *HTTPArtifact) DeepCopy() *HTTPArtifact { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *HTTPAuth) DeepCopyInto(out *HTTPAuth) { + *out = *in + in.ClientCert.DeepCopyInto(&out.ClientCert) + in.OAuth2.DeepCopyInto(&out.OAuth2) + in.BasicAuth.DeepCopyInto(&out.BasicAuth) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HTTPAuth. +func (in *HTTPAuth) DeepCopy() *HTTPAuth { + if in == nil { + return nil + } + out := new(HTTPAuth) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *HTTPBodySource) DeepCopyInto(out *HTTPBodySource) { + *out = *in + if in.Bytes != nil { + in, out := &in.Bytes, &out.Bytes + *out = make([]byte, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HTTPBodySource. +func (in *HTTPBodySource) DeepCopy() *HTTPBodySource { + if in == nil { + return nil + } + out := new(HTTPBodySource) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *HTTPHeader) DeepCopyInto(out *HTTPHeader) { *out = *in @@ -1448,6 +1863,27 @@ func (in *Link) DeepCopy() *Link { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ManifestFrom) DeepCopyInto(out *ManifestFrom) { + *out = *in + if in.Artifact != nil { + in, out := &in.Artifact, &out.Artifact + *out = new(Artifact) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManifestFrom. +func (in *ManifestFrom) DeepCopy() *ManifestFrom { + if in == nil { + return nil + } + out := new(ManifestFrom) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *MemoizationStatus) DeepCopyInto(out *MemoizationStatus) { *out = *in @@ -1756,6 +2192,63 @@ func (in *NoneStrategy) DeepCopy() *NoneStrategy { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *OAuth2Auth) DeepCopyInto(out *OAuth2Auth) { + *out = *in + if in.ClientIDSecret != nil { + in, out := &in.ClientIDSecret, &out.ClientIDSecret + *out = new(v1.SecretKeySelector) + (*in).DeepCopyInto(*out) + } + if in.ClientSecretSecret != nil { + in, out := &in.ClientSecretSecret, &out.ClientSecretSecret + *out = new(v1.SecretKeySelector) + (*in).DeepCopyInto(*out) + } + if in.TokenURLSecret != nil { + in, out := &in.TokenURLSecret, &out.TokenURLSecret + *out = new(v1.SecretKeySelector) + (*in).DeepCopyInto(*out) + } + if in.Scopes != nil { + in, out := &in.Scopes, &out.Scopes + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.EndpointParams != nil { + in, out := &in.EndpointParams, &out.EndpointParams + *out = make([]OAuth2EndpointParam, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OAuth2Auth. +func (in *OAuth2Auth) DeepCopy() *OAuth2Auth { + if in == nil { + return nil + } + out := new(OAuth2Auth) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *OAuth2EndpointParam) DeepCopyInto(out *OAuth2EndpointParam) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OAuth2EndpointParam. +func (in *OAuth2EndpointParam) DeepCopy() *OAuth2EndpointParam { + if in == nil { + return nil + } + out := new(OAuth2EndpointParam) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *OSSArtifact) DeepCopyInto(out *OSSArtifact) { *out = *in @@ -2061,6 +2554,11 @@ func (in *RawArtifact) DeepCopy() *RawArtifact { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *ResourceTemplate) DeepCopyInto(out *ResourceTemplate) { *out = *in + if in.ManifestFrom != nil { + in, out := &in.ManifestFrom, &out.ManifestFrom + *out = new(ManifestFrom) + (*in).DeepCopyInto(*out) + } if in.Flags != nil { in, out := &in.Flags, &out.Flags *out = make([]string, len(*in)) @@ -2923,6 +3421,67 @@ func (in *Workflow) DeepCopyObject() runtime.Object { return nil } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *WorkflowArtifactGCTask) DeepCopyInto(out *WorkflowArtifactGCTask) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) + in.Spec.DeepCopyInto(&out.Spec) + in.Status.DeepCopyInto(&out.Status) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowArtifactGCTask. +func (in *WorkflowArtifactGCTask) DeepCopy() *WorkflowArtifactGCTask { + if in == nil { + return nil + } + out := new(WorkflowArtifactGCTask) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *WorkflowArtifactGCTask) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *WorkflowArtifactGCTaskList) DeepCopyInto(out *WorkflowArtifactGCTaskList) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ListMeta.DeepCopyInto(&out.ListMeta) + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]WorkflowArtifactGCTask, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowArtifactGCTaskList. 
+func (in *WorkflowArtifactGCTaskList) DeepCopy() *WorkflowArtifactGCTaskList { + if in == nil { + return nil + } + out := new(WorkflowArtifactGCTaskList) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *WorkflowArtifactGCTaskList) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *WorkflowEventBinding) DeepCopyInto(out *WorkflowEventBinding) { *out = *in @@ -3258,6 +3817,11 @@ func (in *WorkflowSpec) DeepCopyInto(out *WorkflowSpec) { *out = new(WorkflowMetadata) (*in).DeepCopyInto(*out) } + if in.ArtifactGC != nil { + in, out := &in.ArtifactGC, &out.ArtifactGC + *out = new(ArtifactGC) + (*in).DeepCopyInto(*out) + } return } @@ -3329,6 +3893,11 @@ func (in *WorkflowStatus) DeepCopyInto(out *WorkflowStatus) { *out = new(ArtifactRepositoryRefStatus) (*in).DeepCopyInto(*out) } + if in.ArtifactGCStatus != nil { + in, out := &in.ArtifactGCStatus, &out.ArtifactGCStatus + *out = new(ArtGCStatus) + (*in).DeepCopyInto(*out) + } return } diff --git a/pkg/client/clientset/versioned/typed/workflow/v1alpha1/fake/fake_workflow_client.go b/pkg/client/clientset/versioned/typed/workflow/v1alpha1/fake/fake_workflow_client.go index 00122b2659db..bf529f97865d 100644 --- a/pkg/client/clientset/versioned/typed/workflow/v1alpha1/fake/fake_workflow_client.go +++ b/pkg/client/clientset/versioned/typed/workflow/v1alpha1/fake/fake_workflow_client.go @@ -24,6 +24,10 @@ func (c *FakeArgoprojV1alpha1) Workflows(namespace string) v1alpha1.WorkflowInte return &FakeWorkflows{c, namespace} } +func (c *FakeArgoprojV1alpha1) WorkflowArtifactGCTasks(namespace string) v1alpha1.WorkflowArtifactGCTaskInterface { + return &FakeWorkflowArtifactGCTasks{c, namespace} +} + func (c *FakeArgoprojV1alpha1) 
WorkflowEventBindings(namespace string) v1alpha1.WorkflowEventBindingInterface { return &FakeWorkflowEventBindings{c, namespace} } diff --git a/pkg/client/clientset/versioned/typed/workflow/v1alpha1/fake/fake_workflowartifactgctask.go b/pkg/client/clientset/versioned/typed/workflow/v1alpha1/fake/fake_workflowartifactgctask.go new file mode 100644 index 000000000000..4f57cb6a3d44 --- /dev/null +++ b/pkg/client/clientset/versioned/typed/workflow/v1alpha1/fake/fake_workflowartifactgctask.go @@ -0,0 +1,126 @@ +// Code generated by client-gen. DO NOT EDIT. + +package fake + +import ( + "context" + + v1alpha1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + labels "k8s.io/apimachinery/pkg/labels" + schema "k8s.io/apimachinery/pkg/runtime/schema" + types "k8s.io/apimachinery/pkg/types" + watch "k8s.io/apimachinery/pkg/watch" + testing "k8s.io/client-go/testing" +) + +// FakeWorkflowArtifactGCTasks implements WorkflowArtifactGCTaskInterface +type FakeWorkflowArtifactGCTasks struct { + Fake *FakeArgoprojV1alpha1 + ns string +} + +var workflowartifactgctasksResource = schema.GroupVersionResource{Group: "argoproj.io", Version: "v1alpha1", Resource: "workflowartifactgctasks"} + +var workflowartifactgctasksKind = schema.GroupVersionKind{Group: "argoproj.io", Version: "v1alpha1", Kind: "WorkflowArtifactGCTask"} + +// Get takes name of the workflowArtifactGCTask, and returns the corresponding workflowArtifactGCTask object, and an error if there is any. +func (c *FakeWorkflowArtifactGCTasks) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.WorkflowArtifactGCTask, err error) { + obj, err := c.Fake. 
+ Invokes(testing.NewGetAction(workflowartifactgctasksResource, c.ns, name), &v1alpha1.WorkflowArtifactGCTask{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha1.WorkflowArtifactGCTask), err +} + +// List takes label and field selectors, and returns the list of WorkflowArtifactGCTasks that match those selectors. +func (c *FakeWorkflowArtifactGCTasks) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.WorkflowArtifactGCTaskList, err error) { + obj, err := c.Fake. + Invokes(testing.NewListAction(workflowartifactgctasksResource, workflowartifactgctasksKind, c.ns, opts), &v1alpha1.WorkflowArtifactGCTaskList{}) + + if obj == nil { + return nil, err + } + + label, _, _ := testing.ExtractFromListOptions(opts) + if label == nil { + label = labels.Everything() + } + list := &v1alpha1.WorkflowArtifactGCTaskList{ListMeta: obj.(*v1alpha1.WorkflowArtifactGCTaskList).ListMeta} + for _, item := range obj.(*v1alpha1.WorkflowArtifactGCTaskList).Items { + if label.Matches(labels.Set(item.Labels)) { + list.Items = append(list.Items, item) + } + } + return list, err +} + +// Watch returns a watch.Interface that watches the requested workflowArtifactGCTasks. +func (c *FakeWorkflowArtifactGCTasks) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { + return c.Fake. + InvokesWatch(testing.NewWatchAction(workflowartifactgctasksResource, c.ns, opts)) + +} + +// Create takes the representation of a workflowArtifactGCTask and creates it. Returns the server's representation of the workflowArtifactGCTask, and an error, if there is any. +func (c *FakeWorkflowArtifactGCTasks) Create(ctx context.Context, workflowArtifactGCTask *v1alpha1.WorkflowArtifactGCTask, opts v1.CreateOptions) (result *v1alpha1.WorkflowArtifactGCTask, err error) { + obj, err := c.Fake. 
+ Invokes(testing.NewCreateAction(workflowartifactgctasksResource, c.ns, workflowArtifactGCTask), &v1alpha1.WorkflowArtifactGCTask{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha1.WorkflowArtifactGCTask), err +} + +// Update takes the representation of a workflowArtifactGCTask and updates it. Returns the server's representation of the workflowArtifactGCTask, and an error, if there is any. +func (c *FakeWorkflowArtifactGCTasks) Update(ctx context.Context, workflowArtifactGCTask *v1alpha1.WorkflowArtifactGCTask, opts v1.UpdateOptions) (result *v1alpha1.WorkflowArtifactGCTask, err error) { + obj, err := c.Fake. + Invokes(testing.NewUpdateAction(workflowartifactgctasksResource, c.ns, workflowArtifactGCTask), &v1alpha1.WorkflowArtifactGCTask{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha1.WorkflowArtifactGCTask), err +} + +// UpdateStatus was generated because the type contains a Status member. +// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). +func (c *FakeWorkflowArtifactGCTasks) UpdateStatus(ctx context.Context, workflowArtifactGCTask *v1alpha1.WorkflowArtifactGCTask, opts v1.UpdateOptions) (*v1alpha1.WorkflowArtifactGCTask, error) { + obj, err := c.Fake. + Invokes(testing.NewUpdateSubresourceAction(workflowartifactgctasksResource, "status", c.ns, workflowArtifactGCTask), &v1alpha1.WorkflowArtifactGCTask{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha1.WorkflowArtifactGCTask), err +} + +// Delete takes name of the workflowArtifactGCTask and deletes it. Returns an error if one occurs. +func (c *FakeWorkflowArtifactGCTasks) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { + _, err := c.Fake. + Invokes(testing.NewDeleteAction(workflowartifactgctasksResource, c.ns, name), &v1alpha1.WorkflowArtifactGCTask{}) + + return err +} + +// DeleteCollection deletes a collection of objects. 
+func (c *FakeWorkflowArtifactGCTasks) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { + action := testing.NewDeleteCollectionAction(workflowartifactgctasksResource, c.ns, listOpts) + + _, err := c.Fake.Invokes(action, &v1alpha1.WorkflowArtifactGCTaskList{}) + return err +} + +// Patch applies the patch and returns the patched workflowArtifactGCTask. +func (c *FakeWorkflowArtifactGCTasks) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.WorkflowArtifactGCTask, err error) { + obj, err := c.Fake. + Invokes(testing.NewPatchSubresourceAction(workflowartifactgctasksResource, c.ns, name, pt, data, subresources...), &v1alpha1.WorkflowArtifactGCTask{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha1.WorkflowArtifactGCTask), err +} diff --git a/pkg/client/clientset/versioned/typed/workflow/v1alpha1/generated_expansion.go b/pkg/client/clientset/versioned/typed/workflow/v1alpha1/generated_expansion.go index 83010c69fc66..eb6fc2098539 100644 --- a/pkg/client/clientset/versioned/typed/workflow/v1alpha1/generated_expansion.go +++ b/pkg/client/clientset/versioned/typed/workflow/v1alpha1/generated_expansion.go @@ -8,6 +8,8 @@ type CronWorkflowExpansion interface{} type WorkflowExpansion interface{} +type WorkflowArtifactGCTaskExpansion interface{} + type WorkflowEventBindingExpansion interface{} type WorkflowTaskResultExpansion interface{} diff --git a/pkg/client/clientset/versioned/typed/workflow/v1alpha1/workflow_client.go b/pkg/client/clientset/versioned/typed/workflow/v1alpha1/workflow_client.go index 0c06e2ba7d8f..49da7948a1ba 100644 --- a/pkg/client/clientset/versioned/typed/workflow/v1alpha1/workflow_client.go +++ b/pkg/client/clientset/versioned/typed/workflow/v1alpha1/workflow_client.go @@ -13,6 +13,7 @@ type ArgoprojV1alpha1Interface interface { ClusterWorkflowTemplatesGetter CronWorkflowsGetter WorkflowsGetter + 
WorkflowArtifactGCTasksGetter WorkflowEventBindingsGetter WorkflowTaskResultsGetter WorkflowTaskSetsGetter @@ -36,6 +37,10 @@ func (c *ArgoprojV1alpha1Client) Workflows(namespace string) WorkflowInterface { return newWorkflows(c, namespace) } +func (c *ArgoprojV1alpha1Client) WorkflowArtifactGCTasks(namespace string) WorkflowArtifactGCTaskInterface { + return newWorkflowArtifactGCTasks(c, namespace) +} + func (c *ArgoprojV1alpha1Client) WorkflowEventBindings(namespace string) WorkflowEventBindingInterface { return newWorkflowEventBindings(c, namespace) } diff --git a/pkg/client/clientset/versioned/typed/workflow/v1alpha1/workflowartifactgctask.go b/pkg/client/clientset/versioned/typed/workflow/v1alpha1/workflowartifactgctask.go new file mode 100644 index 000000000000..3d8e3c1f1f2c --- /dev/null +++ b/pkg/client/clientset/versioned/typed/workflow/v1alpha1/workflowartifactgctask.go @@ -0,0 +1,179 @@ +// Code generated by client-gen. DO NOT EDIT. + +package v1alpha1 + +import ( + "context" + "time" + + v1alpha1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + scheme "github.com/argoproj/argo-workflows/v3/pkg/client/clientset/versioned/scheme" + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + types "k8s.io/apimachinery/pkg/types" + watch "k8s.io/apimachinery/pkg/watch" + rest "k8s.io/client-go/rest" +) + +// WorkflowArtifactGCTasksGetter has a method to return a WorkflowArtifactGCTaskInterface. +// A group's client should implement this interface. +type WorkflowArtifactGCTasksGetter interface { + WorkflowArtifactGCTasks(namespace string) WorkflowArtifactGCTaskInterface +} + +// WorkflowArtifactGCTaskInterface has methods to work with WorkflowArtifactGCTask resources. 
+type WorkflowArtifactGCTaskInterface interface { + Create(ctx context.Context, workflowArtifactGCTask *v1alpha1.WorkflowArtifactGCTask, opts v1.CreateOptions) (*v1alpha1.WorkflowArtifactGCTask, error) + Update(ctx context.Context, workflowArtifactGCTask *v1alpha1.WorkflowArtifactGCTask, opts v1.UpdateOptions) (*v1alpha1.WorkflowArtifactGCTask, error) + UpdateStatus(ctx context.Context, workflowArtifactGCTask *v1alpha1.WorkflowArtifactGCTask, opts v1.UpdateOptions) (*v1alpha1.WorkflowArtifactGCTask, error) + Delete(ctx context.Context, name string, opts v1.DeleteOptions) error + DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error + Get(ctx context.Context, name string, opts v1.GetOptions) (*v1alpha1.WorkflowArtifactGCTask, error) + List(ctx context.Context, opts v1.ListOptions) (*v1alpha1.WorkflowArtifactGCTaskList, error) + Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) + Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.WorkflowArtifactGCTask, err error) + WorkflowArtifactGCTaskExpansion +} + +// workflowArtifactGCTasks implements WorkflowArtifactGCTaskInterface +type workflowArtifactGCTasks struct { + client rest.Interface + ns string +} + +// newWorkflowArtifactGCTasks returns a WorkflowArtifactGCTasks +func newWorkflowArtifactGCTasks(c *ArgoprojV1alpha1Client, namespace string) *workflowArtifactGCTasks { + return &workflowArtifactGCTasks{ + client: c.RESTClient(), + ns: namespace, + } +} + +// Get takes name of the workflowArtifactGCTask, and returns the corresponding workflowArtifactGCTask object, and an error if there is any. +func (c *workflowArtifactGCTasks) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.WorkflowArtifactGCTask, err error) { + result = &v1alpha1.WorkflowArtifactGCTask{} + err = c.client.Get(). + Namespace(c.ns). + Resource("workflowartifactgctasks"). 
+ Name(name). + VersionedParams(&options, scheme.ParameterCodec). + Do(ctx). + Into(result) + return +} + +// List takes label and field selectors, and returns the list of WorkflowArtifactGCTasks that match those selectors. +func (c *workflowArtifactGCTasks) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.WorkflowArtifactGCTaskList, err error) { + var timeout time.Duration + if opts.TimeoutSeconds != nil { + timeout = time.Duration(*opts.TimeoutSeconds) * time.Second + } + result = &v1alpha1.WorkflowArtifactGCTaskList{} + err = c.client.Get(). + Namespace(c.ns). + Resource("workflowartifactgctasks"). + VersionedParams(&opts, scheme.ParameterCodec). + Timeout(timeout). + Do(ctx). + Into(result) + return +} + +// Watch returns a watch.Interface that watches the requested workflowArtifactGCTasks. +func (c *workflowArtifactGCTasks) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { + var timeout time.Duration + if opts.TimeoutSeconds != nil { + timeout = time.Duration(*opts.TimeoutSeconds) * time.Second + } + opts.Watch = true + return c.client.Get(). + Namespace(c.ns). + Resource("workflowartifactgctasks"). + VersionedParams(&opts, scheme.ParameterCodec). + Timeout(timeout). + Watch(ctx) +} + +// Create takes the representation of a workflowArtifactGCTask and creates it. Returns the server's representation of the workflowArtifactGCTask, and an error, if there is any. +func (c *workflowArtifactGCTasks) Create(ctx context.Context, workflowArtifactGCTask *v1alpha1.WorkflowArtifactGCTask, opts v1.CreateOptions) (result *v1alpha1.WorkflowArtifactGCTask, err error) { + result = &v1alpha1.WorkflowArtifactGCTask{} + err = c.client.Post(). + Namespace(c.ns). + Resource("workflowartifactgctasks"). + VersionedParams(&opts, scheme.ParameterCodec). + Body(workflowArtifactGCTask). + Do(ctx). + Into(result) + return +} + +// Update takes the representation of a workflowArtifactGCTask and updates it. 
Returns the server's representation of the workflowArtifactGCTask, and an error, if there is any. +func (c *workflowArtifactGCTasks) Update(ctx context.Context, workflowArtifactGCTask *v1alpha1.WorkflowArtifactGCTask, opts v1.UpdateOptions) (result *v1alpha1.WorkflowArtifactGCTask, err error) { + result = &v1alpha1.WorkflowArtifactGCTask{} + err = c.client.Put(). + Namespace(c.ns). + Resource("workflowartifactgctasks"). + Name(workflowArtifactGCTask.Name). + VersionedParams(&opts, scheme.ParameterCodec). + Body(workflowArtifactGCTask). + Do(ctx). + Into(result) + return +} + +// UpdateStatus was generated because the type contains a Status member. +// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). +func (c *workflowArtifactGCTasks) UpdateStatus(ctx context.Context, workflowArtifactGCTask *v1alpha1.WorkflowArtifactGCTask, opts v1.UpdateOptions) (result *v1alpha1.WorkflowArtifactGCTask, err error) { + result = &v1alpha1.WorkflowArtifactGCTask{} + err = c.client.Put(). + Namespace(c.ns). + Resource("workflowartifactgctasks"). + Name(workflowArtifactGCTask.Name). + SubResource("status"). + VersionedParams(&opts, scheme.ParameterCodec). + Body(workflowArtifactGCTask). + Do(ctx). + Into(result) + return +} + +// Delete takes name of the workflowArtifactGCTask and deletes it. Returns an error if one occurs. +func (c *workflowArtifactGCTasks) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { + return c.client.Delete(). + Namespace(c.ns). + Resource("workflowartifactgctasks"). + Name(name). + Body(&opts). + Do(ctx). + Error() +} + +// DeleteCollection deletes a collection of objects. +func (c *workflowArtifactGCTasks) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { + var timeout time.Duration + if listOpts.TimeoutSeconds != nil { + timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second + } + return c.client.Delete(). + Namespace(c.ns). 
+ Resource("workflowartifactgctasks"). + VersionedParams(&listOpts, scheme.ParameterCodec). + Timeout(timeout). + Body(&opts). + Do(ctx). + Error() +} + +// Patch applies the patch and returns the patched workflowArtifactGCTask. +func (c *workflowArtifactGCTasks) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.WorkflowArtifactGCTask, err error) { + result = &v1alpha1.WorkflowArtifactGCTask{} + err = c.client.Patch(pt). + Namespace(c.ns). + Resource("workflowartifactgctasks"). + Name(name). + SubResource(subresources...). + VersionedParams(&opts, scheme.ParameterCodec). + Body(data). + Do(ctx). + Into(result) + return +} diff --git a/pkg/client/informers/externalversions/generic.go b/pkg/client/informers/externalversions/generic.go index 90e6fb7b4c91..16f33fd0fa22 100644 --- a/pkg/client/informers/externalversions/generic.go +++ b/pkg/client/informers/externalversions/generic.go @@ -43,6 +43,8 @@ func (f *sharedInformerFactory) ForResource(resource schema.GroupVersionResource return &genericInformer{resource: resource.GroupResource(), informer: f.Argoproj().V1alpha1().CronWorkflows().Informer()}, nil case v1alpha1.SchemeGroupVersion.WithResource("workflows"): return &genericInformer{resource: resource.GroupResource(), informer: f.Argoproj().V1alpha1().Workflows().Informer()}, nil + case v1alpha1.SchemeGroupVersion.WithResource("workflowartifactgctasks"): + return &genericInformer{resource: resource.GroupResource(), informer: f.Argoproj().V1alpha1().WorkflowArtifactGCTasks().Informer()}, nil case v1alpha1.SchemeGroupVersion.WithResource("workfloweventbindings"): return &genericInformer{resource: resource.GroupResource(), informer: f.Argoproj().V1alpha1().WorkflowEventBindings().Informer()}, nil case v1alpha1.SchemeGroupVersion.WithResource("workflowtaskresults"): diff --git a/pkg/client/informers/externalversions/workflow/v1alpha1/interface.go 
b/pkg/client/informers/externalversions/workflow/v1alpha1/interface.go index c4ca0974197b..d89978caeaa1 100644 --- a/pkg/client/informers/externalversions/workflow/v1alpha1/interface.go +++ b/pkg/client/informers/externalversions/workflow/v1alpha1/interface.go @@ -14,6 +14,8 @@ type Interface interface { CronWorkflows() CronWorkflowInformer // Workflows returns a WorkflowInformer. Workflows() WorkflowInformer + // WorkflowArtifactGCTasks returns a WorkflowArtifactGCTaskInformer. + WorkflowArtifactGCTasks() WorkflowArtifactGCTaskInformer // WorkflowEventBindings returns a WorkflowEventBindingInformer. WorkflowEventBindings() WorkflowEventBindingInformer // WorkflowTaskResults returns a WorkflowTaskResultInformer. @@ -50,6 +52,11 @@ func (v *version) Workflows() WorkflowInformer { return &workflowInformer{factory: v.factory, namespace: v.namespace, tweakListOptions: v.tweakListOptions} } +// WorkflowArtifactGCTasks returns a WorkflowArtifactGCTaskInformer. +func (v *version) WorkflowArtifactGCTasks() WorkflowArtifactGCTaskInformer { + return &workflowArtifactGCTaskInformer{factory: v.factory, namespace: v.namespace, tweakListOptions: v.tweakListOptions} +} + // WorkflowEventBindings returns a WorkflowEventBindingInformer. func (v *version) WorkflowEventBindings() WorkflowEventBindingInformer { return &workflowEventBindingInformer{factory: v.factory, namespace: v.namespace, tweakListOptions: v.tweakListOptions} diff --git a/pkg/client/informers/externalversions/workflow/v1alpha1/workflowartifactgctask.go b/pkg/client/informers/externalversions/workflow/v1alpha1/workflowartifactgctask.go new file mode 100644 index 000000000000..064a01a9c3e6 --- /dev/null +++ b/pkg/client/informers/externalversions/workflow/v1alpha1/workflowartifactgctask.go @@ -0,0 +1,74 @@ +// Code generated by informer-gen. DO NOT EDIT. 
+ +package v1alpha1 + +import ( + "context" + time "time" + + workflowv1alpha1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + versioned "github.com/argoproj/argo-workflows/v3/pkg/client/clientset/versioned" + internalinterfaces "github.com/argoproj/argo-workflows/v3/pkg/client/informers/externalversions/internalinterfaces" + v1alpha1 "github.com/argoproj/argo-workflows/v3/pkg/client/listers/workflow/v1alpha1" + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + runtime "k8s.io/apimachinery/pkg/runtime" + watch "k8s.io/apimachinery/pkg/watch" + cache "k8s.io/client-go/tools/cache" +) + +// WorkflowArtifactGCTaskInformer provides access to a shared informer and lister for +// WorkflowArtifactGCTasks. +type WorkflowArtifactGCTaskInformer interface { + Informer() cache.SharedIndexInformer + Lister() v1alpha1.WorkflowArtifactGCTaskLister +} + +type workflowArtifactGCTaskInformer struct { + factory internalinterfaces.SharedInformerFactory + tweakListOptions internalinterfaces.TweakListOptionsFunc + namespace string +} + +// NewWorkflowArtifactGCTaskInformer constructs a new informer for WorkflowArtifactGCTask type. +// Always prefer using an informer factory to get a shared informer instead of getting an independent +// one. This reduces memory footprint and number of connections to the server. +func NewWorkflowArtifactGCTaskInformer(client versioned.Interface, namespace string, resyncPeriod time.Duration, indexers cache.Indexers) cache.SharedIndexInformer { + return NewFilteredWorkflowArtifactGCTaskInformer(client, namespace, resyncPeriod, indexers, nil) +} + +// NewFilteredWorkflowArtifactGCTaskInformer constructs a new informer for WorkflowArtifactGCTask type. +// Always prefer using an informer factory to get a shared informer instead of getting an independent +// one. This reduces memory footprint and number of connections to the server. 
+func NewFilteredWorkflowArtifactGCTaskInformer(client versioned.Interface, namespace string, resyncPeriod time.Duration, indexers cache.Indexers, tweakListOptions internalinterfaces.TweakListOptionsFunc) cache.SharedIndexInformer { + return cache.NewSharedIndexInformer( + &cache.ListWatch{ + ListFunc: func(options v1.ListOptions) (runtime.Object, error) { + if tweakListOptions != nil { + tweakListOptions(&options) + } + return client.ArgoprojV1alpha1().WorkflowArtifactGCTasks(namespace).List(context.TODO(), options) + }, + WatchFunc: func(options v1.ListOptions) (watch.Interface, error) { + if tweakListOptions != nil { + tweakListOptions(&options) + } + return client.ArgoprojV1alpha1().WorkflowArtifactGCTasks(namespace).Watch(context.TODO(), options) + }, + }, + &workflowv1alpha1.WorkflowArtifactGCTask{}, + resyncPeriod, + indexers, + ) +} + +func (f *workflowArtifactGCTaskInformer) defaultInformer(client versioned.Interface, resyncPeriod time.Duration) cache.SharedIndexInformer { + return NewFilteredWorkflowArtifactGCTaskInformer(client, f.namespace, resyncPeriod, cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc}, f.tweakListOptions) +} + +func (f *workflowArtifactGCTaskInformer) Informer() cache.SharedIndexInformer { + return f.factory.InformerFor(&workflowv1alpha1.WorkflowArtifactGCTask{}, f.defaultInformer) +} + +func (f *workflowArtifactGCTaskInformer) Lister() v1alpha1.WorkflowArtifactGCTaskLister { + return v1alpha1.NewWorkflowArtifactGCTaskLister(f.Informer().GetIndexer()) +} diff --git a/pkg/client/listers/workflow/v1alpha1/expansion_generated.go b/pkg/client/listers/workflow/v1alpha1/expansion_generated.go index d66c352cd562..0f3d203fa214 100644 --- a/pkg/client/listers/workflow/v1alpha1/expansion_generated.go +++ b/pkg/client/listers/workflow/v1alpha1/expansion_generated.go @@ -22,6 +22,14 @@ type WorkflowListerExpansion interface{} // WorkflowNamespaceLister. 
type WorkflowNamespaceListerExpansion interface{} +// WorkflowArtifactGCTaskListerExpansion allows custom methods to be added to +// WorkflowArtifactGCTaskLister. +type WorkflowArtifactGCTaskListerExpansion interface{} + +// WorkflowArtifactGCTaskNamespaceListerExpansion allows custom methods to be added to +// WorkflowArtifactGCTaskNamespaceLister. +type WorkflowArtifactGCTaskNamespaceListerExpansion interface{} + // WorkflowEventBindingListerExpansion allows custom methods to be added to // WorkflowEventBindingLister. type WorkflowEventBindingListerExpansion interface{} diff --git a/pkg/client/listers/workflow/v1alpha1/workflowartifactgctask.go b/pkg/client/listers/workflow/v1alpha1/workflowartifactgctask.go new file mode 100644 index 000000000000..7bee8f1cc3ee --- /dev/null +++ b/pkg/client/listers/workflow/v1alpha1/workflowartifactgctask.go @@ -0,0 +1,83 @@ +// Code generated by lister-gen. DO NOT EDIT. + +package v1alpha1 + +import ( + v1alpha1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + "k8s.io/apimachinery/pkg/api/errors" + "k8s.io/apimachinery/pkg/labels" + "k8s.io/client-go/tools/cache" +) + +// WorkflowArtifactGCTaskLister helps list WorkflowArtifactGCTasks. +// All objects returned here must be treated as read-only. +type WorkflowArtifactGCTaskLister interface { + // List lists all WorkflowArtifactGCTasks in the indexer. + // Objects returned here must be treated as read-only. + List(selector labels.Selector) (ret []*v1alpha1.WorkflowArtifactGCTask, err error) + // WorkflowArtifactGCTasks returns an object that can list and get WorkflowArtifactGCTasks. + WorkflowArtifactGCTasks(namespace string) WorkflowArtifactGCTaskNamespaceLister + WorkflowArtifactGCTaskListerExpansion +} + +// workflowArtifactGCTaskLister implements the WorkflowArtifactGCTaskLister interface. +type workflowArtifactGCTaskLister struct { + indexer cache.Indexer +} + +// NewWorkflowArtifactGCTaskLister returns a new WorkflowArtifactGCTaskLister. 
+func NewWorkflowArtifactGCTaskLister(indexer cache.Indexer) WorkflowArtifactGCTaskLister { + return &workflowArtifactGCTaskLister{indexer: indexer} +} + +// List lists all WorkflowArtifactGCTasks in the indexer. +func (s *workflowArtifactGCTaskLister) List(selector labels.Selector) (ret []*v1alpha1.WorkflowArtifactGCTask, err error) { + err = cache.ListAll(s.indexer, selector, func(m interface{}) { + ret = append(ret, m.(*v1alpha1.WorkflowArtifactGCTask)) + }) + return ret, err +} + +// WorkflowArtifactGCTasks returns an object that can list and get WorkflowArtifactGCTasks. +func (s *workflowArtifactGCTaskLister) WorkflowArtifactGCTasks(namespace string) WorkflowArtifactGCTaskNamespaceLister { + return workflowArtifactGCTaskNamespaceLister{indexer: s.indexer, namespace: namespace} +} + +// WorkflowArtifactGCTaskNamespaceLister helps list and get WorkflowArtifactGCTasks. +// All objects returned here must be treated as read-only. +type WorkflowArtifactGCTaskNamespaceLister interface { + // List lists all WorkflowArtifactGCTasks in the indexer for a given namespace. + // Objects returned here must be treated as read-only. + List(selector labels.Selector) (ret []*v1alpha1.WorkflowArtifactGCTask, err error) + // Get retrieves the WorkflowArtifactGCTask from the indexer for a given namespace and name. + // Objects returned here must be treated as read-only. + Get(name string) (*v1alpha1.WorkflowArtifactGCTask, error) + WorkflowArtifactGCTaskNamespaceListerExpansion +} + +// workflowArtifactGCTaskNamespaceLister implements the WorkflowArtifactGCTaskNamespaceLister +// interface. +type workflowArtifactGCTaskNamespaceLister struct { + indexer cache.Indexer + namespace string +} + +// List lists all WorkflowArtifactGCTasks in the indexer for a given namespace. 
+func (s workflowArtifactGCTaskNamespaceLister) List(selector labels.Selector) (ret []*v1alpha1.WorkflowArtifactGCTask, err error) { + err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { + ret = append(ret, m.(*v1alpha1.WorkflowArtifactGCTask)) + }) + return ret, err +} + +// Get retrieves the WorkflowArtifactGCTask from the indexer for a given namespace and name. +func (s workflowArtifactGCTaskNamespaceLister) Get(name string) (*v1alpha1.WorkflowArtifactGCTask, error) { + obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) + if err != nil { + return nil, err + } + if !exists { + return nil, errors.NewNotFound(v1alpha1.Resource("workflowartifactgctask"), name) + } + return obj.(*v1alpha1.WorkflowArtifactGCTask), nil +} diff --git a/pkg/plugins/executor/swagger.yml b/pkg/plugins/executor/swagger.yml index 19574a7321f2..8f2c2fc133a0 100644 --- a/pkg/plugins/executor/swagger.yml +++ b/pkg/plugins/executor/swagger.yml @@ -11,7 +11,7 @@ definitions: properties: fsType: description: |- - Filesystem type of the volume that you want to mount. + fsType is the filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore @@ -20,7 +20,7 @@ definitions: type: string partition: description: |- - The partition in the volume that you want to mount. + partition is the partition in the volume that you want to mount. If omitted, the default is to mount by volume name. Examples: For volume /dev/sda1, you specify the partition as "1". Similarly, the volume partition for /dev/sda is "0" (or you can leave the property empty). @@ -29,14 +29,13 @@ definitions: type: integer readOnly: description: |- - Specify "true" to force and set the ReadOnly property in VolumeMounts to "true". 
- If omitted, the default is "false". + readOnly value true will force the readOnly setting in VolumeMounts. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore +optional type: boolean volumeID: description: |- - Unique ID of the persistent disk resource in AWS (Amazon EBS volume). + volumeID is unique ID of the persistent disk resource in AWS (Amazon EBS volume). More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore type: string title: Represents a Persistent Disk resource in AWS. @@ -94,8 +93,15 @@ definitions: archiveLogs: description: ArchiveLogs indicates if the container logs should be archived type: boolean + artifactGC: + $ref: '#/definitions/ArtifactGC' artifactory: $ref: '#/definitions/ArtifactoryArtifact' + azure: + $ref: '#/definitions/AzureArtifact' + deleted: + description: Has this been deleted? + type: boolean from: description: From allows an artifact to reference an artifact from a previous step @@ -147,6 +153,21 @@ definitions: the specified source type: string type: object + ArtifactGC: + description: ArtifactGC describes how to delete artifacts from completed Workflows + properties: + podMetadata: + $ref: '#/definitions/Metadata' + serviceAccountName: + description: ServiceAccountName is an optional field for specifying the Service + Account that should be assigned to the Pod doing the deletion + type: string + strategy: + $ref: '#/definitions/ArtifactGCStrategy' + type: object + ArtifactGCStrategy: + title: ArtifactGCStrategy is the strategy when to delete artifacts for GC. + type: string ArtifactLocation: description: |- It is used as single artifact in the context of inputs/outputs (e.g. outputs.artifacts.artname). 
@@ -158,6 +179,8 @@ definitions: type: boolean artifactory: $ref: '#/definitions/ArtifactoryArtifact' + azure: + $ref: '#/definitions/AzureArtifact' gcs: $ref: '#/definitions/GCSArtifact' git: @@ -182,8 +205,15 @@ definitions: archiveLogs: description: ArchiveLogs indicates if the container logs should be archived type: boolean + artifactGC: + $ref: '#/definitions/ArtifactGC' artifactory: $ref: '#/definitions/ArtifactoryArtifact' + azure: + $ref: '#/definitions/AzureArtifact' + deleted: + description: Has this been deleted? + type: boolean from: description: From allows an artifact to reference an artifact from a previous step @@ -250,23 +280,46 @@ definitions: items: $ref: '#/definitions/Artifact' type: array + AzureArtifact: + description: AzureArtifact is the location of a an Azure Storage artifact + properties: + accountKeySecret: + $ref: '#/definitions/SecretKeySelector' + blob: + description: Blob is the blob name (i.e., path) in the container where the + artifact resides + type: string + container: + description: Container is the container where resources will be stored + type: string + endpoint: + description: Endpoint is the service url associated with an account. It is + most likely "https://.blob.core.windows.net" + type: string + useSDKCreds: + description: UseSDKCreds tells the driver to figure out credentials based + on sdk defaults. + type: boolean + type: object AzureDataDiskCachingMode: + description: +enum type: string AzureDataDiskKind: + description: +enum type: string AzureDiskVolumeSource: properties: cachingMode: $ref: '#/definitions/AzureDataDiskCachingMode' diskName: - description: The Name of the data disk in the blob storage + description: diskName is the Name of the data disk in the blob storage type: string diskURI: - description: The URI the data disk in the blob storage + description: diskURI is the URI of data disk in the blob storage type: string fsType: description: |- - Filesystem type to mount. 
+ fsType is Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. +optional @@ -275,7 +328,7 @@ definitions: $ref: '#/definitions/AzureDataDiskKind' readOnly: description: |- - Defaults to false (read/write). ReadOnly here will force + readOnly Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. +optional type: boolean @@ -286,16 +339,16 @@ definitions: properties: readOnly: description: |- - Defaults to false (read/write). ReadOnly here will force + readOnly defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. +optional type: boolean secretName: - description: the name of secret that contains Azure Storage Account Name and - Key + description: secretName is the name of secret that contains Azure Storage + Account Name and Key type: string shareName: - description: Share Name + description: shareName is the azure share Name type: string title: AzureFile represents an Azure File Service mount on the host and bind mount to the pod. @@ -314,18 +367,26 @@ definitions: strategy type: string type: object + BasicAuth: + description: BasicAuth describes the secret selectors required for basic authentication + properties: + passwordSecret: + $ref: '#/definitions/SecretKeySelector' + usernameSecret: + $ref: '#/definitions/SecretKeySelector' + type: object CSIVolumeSource: description: Represents a source location of a volume to mount, managed by an external CSI driver properties: driver: description: |- - Driver is the name of the CSI driver that handles this volume. + driver is the name of the CSI driver that handles this volume. Consult with your admin for the correct name as registered in the cluster. type: string fsType: description: |- - Filesystem type to mount. Ex. "ext4", "xfs", "ntfs". + fsType to mount. Ex. "ext4", "xfs", "ntfs". 
If not provided, the empty value is passed to the associated CSI driver which will determine the default filesystem to apply. +optional @@ -334,7 +395,7 @@ definitions: $ref: '#/definitions/LocalObjectReference' readOnly: description: |- - Specifies a read-only configuration for the volume. + readOnly specifies a read-only configuration for the volume. Defaults to false (read/write). +optional type: boolean @@ -342,7 +403,7 @@ definitions: additionalProperties: type: string description: |- - VolumeAttributes stores driver-specific properties that are passed to the CSI + volumeAttributes stores driver-specific properties that are passed to the CSI driver. Consult your driver's documentation for supported values. +optional type: object @@ -381,26 +442,26 @@ definitions: properties: monitors: description: |- - Required: Monitors is a collection of Ceph monitors + monitors is Required: Monitors is a collection of Ceph monitors More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it items: type: string type: array path: description: |- - Optional: Used as the mounted root, rather than the full Ceph tree, default is / + path is Optional: Used as the mounted root, rather than the full Ceph tree, default is / +optional type: string readOnly: description: |- - Optional: Defaults to false (read/write). ReadOnly here will force + readOnly is Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. 
More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it +optional type: boolean secretFile: description: |- - Optional: SecretFile is the path to key ring for User, default is /etc/ceph/user.secret + secretFile is Optional: SecretFile is the path to key ring for User, default is /etc/ceph/user.secret More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it +optional type: string @@ -408,7 +469,7 @@ definitions: $ref: '#/definitions/LocalObjectReference' user: description: |- - Optional: User is the rados user name, default is admin + user is optional: User is the rados user name, default is admin More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it +optional type: string @@ -421,7 +482,7 @@ definitions: properties: fsType: description: |- - Filesystem type to mount. + fsType is the filesystem type to mount. Must be a filesystem type supported by the host operating system. Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. More info: https://examples.k8s.io/mysql-cinder-pd/README.md @@ -429,7 +490,7 @@ definitions: type: string readOnly: description: |- - Optional: Defaults to false (read/write). ReadOnly here will force + readOnly defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. More info: https://examples.k8s.io/mysql-cinder-pd/README.md +optional @@ -438,11 +499,20 @@ definitions: $ref: '#/definitions/LocalObjectReference' volumeID: description: |- - volume id used to identify the volume in cinder. + volumeID used to identify the volume in cinder. More info: https://examples.k8s.io/mysql-cinder-pd/README.md type: string title: Represents a cinder volume resource in Openstack. 
type: object + ClientCertAuth: + description: ClientCertAuth holds necessary information for client authentication + via certificates + properties: + clientCertSecret: + $ref: '#/definitions/SecretKeySelector' + clientKeySecret: + $ref: '#/definitions/SecretKeySelector' + type: object ConfigMapEnvSource: description: |- The contents of the target ConfigMap's Data field will represent the @@ -465,6 +535,7 @@ definitions: variables with. type: object ConfigMapKeySelector: + description: +structType=atomic properties: key: description: The key to select. @@ -493,7 +564,7 @@ definitions: properties: items: description: |- - If unspecified, each key-value pair in the Data field of the referenced + items if unspecified, each key-value pair in the Data field of the referenced ConfigMap will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be @@ -513,7 +584,7 @@ definitions: type: string optional: description: |- - Specify whether the ConfigMap or its keys must be defined + optional specify whether the ConfigMap or its keys must be defined +optional type: boolean title: Adapts a ConfigMap into a projected volume. @@ -527,7 +598,7 @@ definitions: properties: defaultMode: description: |- - Optional: mode bits used to set permissions on created files by default. + defaultMode is optional: mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Defaults to 0644. 
@@ -539,7 +610,7 @@ definitions: type: integer items: description: |- - If unspecified, each key-value pair in the Data field of the referenced + items if unspecified, each key-value pair in the Data field of the referenced ConfigMap will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be @@ -559,7 +630,7 @@ definitions: type: string optional: description: |- - Specify whether the ConfigMap or its keys must be defined + optional specify whether the ConfigMap or its keys must be defined +optional type: boolean title: Adapts a ConfigMap into a volume. @@ -569,12 +640,12 @@ definitions: args: description: |- Arguments to the entrypoint. - The docker image's CMD is used if this is not provided. + The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable - cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax - can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, - regardless of whether the variable exists or not. - Cannot be updated. + cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced + to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will + produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless + of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell +optional items: @@ -583,12 +654,12 @@ definitions: command: description: |- Entrypoint array. Not executed within a shell. - The docker image's ENTRYPOINT is used if this is not provided. + The container image's ENTRYPOINT is used if this is not provided. 
Variable references $(VAR_NAME) are expanded using the container's environment. If a variable - cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax - can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, - regardless of whether the variable exists or not. - Cannot be updated. + cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced + to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will + produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless + of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell +optional items: @@ -618,7 +689,7 @@ definitions: type: array image: description: |- - Docker image name. + Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. @@ -733,12 +804,12 @@ definitions: args: description: |- Arguments to the entrypoint. - The docker image's CMD is used if this is not provided. + The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable - cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax - can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, - regardless of whether the variable exists or not. - Cannot be updated. + cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced + to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will + produce the string literal "$(VAR_NAME)". 
Escaped references will never be expanded, regardless + of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell +optional items: @@ -747,12 +818,12 @@ definitions: command: description: |- Entrypoint array. Not executed within a shell. - The docker image's ENTRYPOINT is used if this is not provided. + The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable - cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax - can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, - regardless of whether the variable exists or not. - Cannot be updated. + cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced + to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will + produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless + of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell +optional items: @@ -786,7 +857,7 @@ definitions: type: array image: description: |- - Docker image name. + Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. @@ -928,12 +999,24 @@ definitions: $ref: '#/definitions/Protocol' title: ContainerPort represents a network port in a single container. 
type: object + ContainerSetRetryStrategy: + properties: + duration: + description: |- + Duration is the time between each retry, examples values are "300ms", "1s" or "5m". + Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h". + type: string + retries: + $ref: '#/definitions/IntOrString' + type: object ContainerSetTemplate: properties: containers: items: $ref: '#/definitions/ContainerNode' type: array + retryStrategy: + $ref: '#/definitions/ContainerSetRetryStrategy' volumeMounts: items: $ref: '#/definitions/VolumeMount' @@ -1158,11 +1241,12 @@ definitions: value: description: |- Variable references $(VAR_NAME) are expanded - using the previous defined environment variables in the container and + using the previously defined environment variables in the container and any service environment variables. If a variable cannot be resolved, - the reference in the input string will be unchanged. The $(VAR_NAME) - syntax can be escaped with a double $$, ie: $$(VAR_NAME). Escaped - references will never be expanded, regardless of whether the variable + the reference in the input string will be unchanged. Double $$ are reduced + to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. + "$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)". + Escaped references will never be expanded, regardless of whether the variable exists or not. Defaults to "". +optional @@ -1236,7 +1320,7 @@ definitions: properties: fsType: description: |- - Filesystem type to mount. + fsType is the filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. 
TODO: how do we prevent errors in the filesystem from compromising the machine @@ -1244,26 +1328,26 @@ definitions: type: string lun: description: |- - Optional: FC target lun number + lun is Optional: FC target lun number +optional format: int32 type: integer readOnly: description: |- - Optional: Defaults to false (read/write). ReadOnly here will force + readOnly is Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. +optional type: boolean targetWWNs: description: |- - Optional: FC target worldwide names (WWNs) + targetWWNs is Optional: FC target worldwide names (WWNs) +optional items: type: string type: array wwids: description: |- - Optional: FC volume world wide identifiers (wwids) + wwids Optional: FC volume world wide identifiers (wwids) Either wwids or combination of targetWWNs and lun must be set, but not both simultaneously. +optional items: @@ -1292,11 +1376,11 @@ definitions: provisioned/attached using an exec based plugin. properties: driver: - description: Driver is the name of the driver to use for this volume. + description: driver is the name of the driver to use for this volume. type: string fsType: description: |- - Filesystem type to mount. + fsType is the filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. "ext4", "xfs", "ntfs". The default filesystem depends on FlexVolume script. +optional @@ -1305,12 +1389,12 @@ definitions: additionalProperties: type: string description: |- - Optional: Extra command options if any. + options is Optional: this field holds extra command options if any. +optional type: object readOnly: description: |- - Optional: Defaults to false (read/write). ReadOnly here will force + readOnly is Optional: defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. 
+optional type: boolean @@ -1324,13 +1408,13 @@ definitions: properties: datasetName: description: |- - Name of the dataset stored as metadata -> name on the dataset for Flocker + datasetName is Name of the dataset stored as metadata -> name on the dataset for Flocker should be considered as deprecated +optional type: string datasetUUID: description: |- - UUID of the dataset. This is unique identifier of a Flocker dataset + datasetUUID is the UUID of the dataset. This is unique identifier of a Flocker dataset +optional type: string title: Represents a Flocker volume mounted by the Flocker agent. @@ -1344,7 +1428,7 @@ definitions: properties: fsType: description: |- - Filesystem type of the volume that you want to mount. + fsType is filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk @@ -1353,7 +1437,7 @@ definitions: type: string partition: description: |- - The partition in the volume that you want to mount. + partition is the partition in the volume that you want to mount. If omitted, the default is to mount by volume name. Examples: For volume /dev/sda1, you specify the partition as "1". Similarly, the volume partition for /dev/sda is "0" (or you can leave the property empty). @@ -1363,12 +1447,12 @@ definitions: type: integer pdName: description: |- - Unique name of the PD resource in GCE. Used to identify the disk in GCE. + pdName is unique name of the PD resource in GCE. Used to identify the disk in GCE. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk type: string readOnly: description: |- - ReadOnly here will force the ReadOnly setting in VolumeMounts. + readOnly here will force the ReadOnly setting in VolumeMounts. Defaults to false. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk +optional @@ -1387,6 +1471,23 @@ definitions: serviceAccountKeySecret: $ref: '#/definitions/SecretKeySelector' type: object + GRPCAction: + properties: + port: + description: Port number of the gRPC service. Number must be in the range + 1 to 65535. + format: int32 + type: integer + service: + description: |- + Service is the name of the service to place in the gRPC HealthCheckRequest + (see https://github.com/grpc/grpc/blob/master/doc/health-checking.md). + + If this is not specified, the default behavior is defined by gRPC. + +optional + +default="" + type: string + type: object Gauge: description: Gauge is a Gauge prometheus metric properties: @@ -1400,6 +1501,9 @@ definitions: GitArtifact: description: GitArtifact is the location of an git artifact properties: + branch: + description: Branch is the branch to fetch when `SingleBranch` is enabled + type: string depth: description: |- Depth specifies clones/fetches should be shallow and include the given @@ -1427,6 +1531,10 @@ definitions: revision: description: Revision is the git commit, tag, branch to checkout type: string + singleBranch: + description: SingleBranch enables single branch clone, using the `branch` + parameter + type: boolean sshPrivateKeySecret: $ref: '#/definitions/SecretKeySelector' usernameSecret: @@ -1440,18 +1548,18 @@ definitions: properties: directory: description: |- - Target directory name. + directory is the target directory name. Must not contain or start with '..'. If '.' is supplied, the volume directory will be the git repository. Otherwise, if specified, the volume will contain the git repository in the subdirectory with the given name. +optional type: string repository: - description: Repository URL + description: repository is the URL type: string revision: description: |- - Commit hash for the specified revision. + revision is the commit hash for the specified revision. 
+optional type: string title: |- @@ -1465,17 +1573,17 @@ definitions: properties: endpoints: description: |- - EndpointsName is the endpoint name that details Glusterfs topology. + endpoints is the endpoint name that details Glusterfs topology. More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod type: string path: description: |- - Path is the Glusterfs volume path. + path is the Glusterfs volume path. More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod type: string readOnly: description: |- - ReadOnly here will force the Glusterfs volume to be mounted with read-only permissions. + readOnly here will force the Glusterfs volume to be mounted with read-only permissions. Defaults to false. More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod +optional @@ -1528,14 +1636,21 @@ definitions: body: description: Body is content of the HTTP Request type: string + bodyFrom: + $ref: '#/definitions/HTTPBodySource' headers: - description: Headers are an optional list of headers to send with HTTP requests - items: - $ref: '#/definitions/HTTPHeader' - type: array + $ref: '#/definitions/HTTPHeaders' + insecureSkipVerify: + description: InsecureSkipVerify is a bool when if set to true will skip TLS + verification for the HTTP client + type: boolean method: description: Method is HTTP methods for HTTP Request type: string + successCondition: + description: SuccessCondition is an expression if evaluated to true is considered + successful + type: string timeoutSeconds: description: TimeoutSeconds is request timeout for HTTP Request. 
Default is 30 seconds @@ -1546,9 +1661,11 @@ definitions: type: string type: object HTTPArtifact: - description: HTTPArtifact allows an file served on HTTP to be placed as an input + description: HTTPArtifact allows a file served on HTTP to be placed as an input artifact in a container properties: + auth: + $ref: '#/definitions/HTTPAuth' headers: description: Headers are an optional list of headers to send with HTTP requests for artifacts @@ -1559,6 +1676,24 @@ definitions: description: URL of the artifact type: string type: object + HTTPAuth: + properties: + basicAuth: + $ref: '#/definitions/BasicAuth' + clientCert: + $ref: '#/definitions/ClientCertAuth' + oauth2: + $ref: '#/definitions/OAuth2Auth' + type: object + HTTPBodySource: + properties: + bytes: + items: + format: uint8 + type: integer + type: array + title: HTTPBodySource contains the source of the HTTP body. + type: object HTTPGetAction: properties: host: @@ -1599,18 +1734,10 @@ definitions: secretKeyRef: $ref: '#/definitions/SecretKeySelector' type: object - Handler: - description: |- - Handler defines a specific action that should be taken - TODO: pass structured data to these actions, and document that data here. - properties: - exec: - $ref: '#/definitions/ExecAction' - httpGet: - $ref: '#/definitions/HTTPGetAction' - tcpSocket: - $ref: '#/definitions/TCPSocketAction' - type: object + HTTPHeaders: + items: + $ref: '#/definitions/HTTPHeader' + type: array Header: description: Header indicate a key-value request header to be used when fetching artifacts over HTTP @@ -1649,6 +1776,7 @@ definitions: type: string type: object HostPathType: + description: +enum type: string HostPathVolumeSource: description: Host path volumes do not support ownership management or SELinux @@ -1656,7 +1784,7 @@ definitions: properties: path: description: |- - Path of the directory on the host. + path of the directory on the host. If the path is a symlink, it will follow the link to the real path. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath type: string @@ -1671,17 +1799,17 @@ definitions: properties: chapAuthDiscovery: description: |- - whether support iSCSI Discovery CHAP authentication + chapAuthDiscovery defines whether support iSCSI Discovery CHAP authentication +optional type: boolean chapAuthSession: description: |- - whether support iSCSI Session CHAP authentication + chapAuthSession defines whether support iSCSI Session CHAP authentication +optional type: boolean fsType: description: |- - Filesystem type of the volume that you want to mount. + fsType is the filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#iscsi @@ -1690,27 +1818,27 @@ definitions: type: string initiatorName: description: |- - Custom iSCSI Initiator Name. + initiatorName is the custom iSCSI Initiator Name. If initiatorName is specified with iscsiInterface simultaneously, new iSCSI interface : will be created for the connection. +optional type: string iqn: - description: Target iSCSI Qualified Name. + description: iqn is the target iSCSI Qualified Name. type: string iscsiInterface: description: |- - iSCSI Interface Name that uses an iSCSI transport. + iscsiInterface is the interface Name that uses an iSCSI transport. Defaults to 'default' (tcp). +optional type: string lun: - description: iSCSI Target Lun number. + description: lun represents iSCSI Target Lun number. format: int32 type: integer portals: description: |- - iSCSI Target Portal List. The portal is either an IP or ip_addr:port if the port + portals is the iSCSI Target Portal List. The portal is either an IP or ip_addr:port if the port is other than default (typically TCP ports 860 and 3260). 
+optional items: @@ -1718,7 +1846,7 @@ definitions: type: array readOnly: description: |- - ReadOnly here will force the ReadOnly setting in VolumeMounts. + readOnly here will force the ReadOnly setting in VolumeMounts. Defaults to false. +optional type: boolean @@ -1726,7 +1854,7 @@ definitions: $ref: '#/definitions/LocalObjectReference' targetPortal: description: |- - iSCSI Target Portal. The Portal is either an IP or ip_addr:port if the port + targetPortal is iSCSI Target Portal. The Portal is either an IP or ip_addr:port if the port is other than default (typically TCP ports 860 and 3260). type: string title: Represents an ISCSI disk. @@ -1777,11 +1905,11 @@ definitions: KeyToPath: properties: key: - description: The key to project. + description: key is the key to project. type: string mode: description: |- - Optional: mode bits used to set permissions on this file. + mode is Optional: mode bits used to set permissions on this file. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. If not specified, the volume defaultMode will be used. @@ -1792,7 +1920,7 @@ definitions: type: integer path: description: |- - The relative path of the file to map the key to. + path is the relative path of the file to map the key to. May not be an absolute path. May not contain the path element '..'. May not start with the string '..'. @@ -1858,16 +1986,36 @@ definitions: until the action is complete, unless the container process fails, in which case the handler is aborted. properties: postStart: - $ref: '#/definitions/Handler' + $ref: '#/definitions/LifecycleHandler' preStop: - $ref: '#/definitions/Handler' + $ref: '#/definitions/LifecycleHandler' + type: object + LifecycleHandler: + description: |- + LifecycleHandler defines a specific action that should be taken in a lifecycle + hook. One and only one of the fields, except TCPSocket must be specified. 
+ properties: + exec: + $ref: '#/definitions/ExecAction' + httpGet: + $ref: '#/definitions/HTTPGetAction' + tcpSocket: + $ref: '#/definitions/TCPSocketAction' type: object LifecycleHook: properties: arguments: $ref: '#/definitions/Arguments' + expression: + description: |- + Expression is a condition expression for when a node will be retried. If it evaluates to false, the node will not + be retried and the retry strategy will be ignored + type: string template: + description: Template is the name of the template to execute by the hook type: string + templateRef: + $ref: '#/definitions/TemplateRef' type: object LifecycleHooks: additionalProperties: @@ -1877,6 +2025,7 @@ definitions: description: |- LocalObjectReference contains enough information to let you locate the referenced object inside the same namespace. + +structType=atomic properties: name: description: |- @@ -1910,6 +2059,16 @@ definitions: type: string operation: $ref: '#/definitions/ManagedFieldsOperationType' + subresource: + description: |- + Subresource is the name of the subresource used to update that object, or + empty string if the object was updated through the main resource. The + value of this field is used to distinguish between managers, even if they + share the same name. For example, a status update will be distinct from a + regular update using the same manager name. + Note that the APIVersion field is not related to the Subresource field and + it always corresponds to the version of the main resource. + type: string time: $ref: '#/definitions/Time' type: object @@ -1917,6 +2076,11 @@ definitions: title: ManagedFieldsOperationType is the type of operation which lead to a ManagedFieldsEntry being created. 
type: string + ManifestFrom: + properties: + artifact: + $ref: '#/definitions/Artifact' + type: object Memoize: description: Memoization enables caching for the Outputs of the template properties: @@ -1961,6 +2125,7 @@ definitions: type: array type: object MountPropagationMode: + description: +enum title: MountPropagationMode describes mount propagation. type: string Mutex: @@ -1975,20 +2140,19 @@ definitions: properties: path: description: |- - Path that is exported by the NFS server. + path that is exported by the NFS server. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs type: string readOnly: description: |- - ReadOnly here will force - the NFS export to be mounted with read-only permissions. + readOnly here will force the NFS export to be mounted with read-only permissions. Defaults to false. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs +optional type: boolean server: description: |- - Server is the hostname or IP address of the NFS server. + server is the hostname or IP address of the NFS server. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs type: string title: Represents an NFS mount that lasts the lifetime of a pod. @@ -2025,12 +2189,15 @@ definitions: $ref: '#/definitions/Outputs' phase: $ref: '#/definitions/NodePhase' + progress: + $ref: '#/definitions/Progress' type: object NodeSelector: description: |- A node selector represents the union of the results of one or more label queries over a set of nodes; that is, it represents the OR of the selectors represented by the node selector terms. + +structType=atomic properties: nodeSelectorTerms: description: Required. A list of node selector terms. The terms are ORed. @@ -2042,6 +2209,7 @@ definitions: description: |- A node selector operator is the set of operators that can be used in a node selector requirement. 
+ +enum type: string NodeSelectorRequirement: description: |- @@ -2070,6 +2238,7 @@ definitions: A null or empty node selector term matches no objects. The requirements of them are ANDed. The TopologySelectorTerm type implements a subset of the NodeSelectorTerm. + +structType=atomic properties: matchExpressions: description: |- @@ -2092,6 +2261,36 @@ definitions: files. Note that if the artifact is a directory, the artifact driver must support the ability to save/load the directory appropriately. type: object + OAuth2Auth: + description: OAuth2Auth holds all information for client authentication via OAuth2 + tokens + properties: + clientIDSecret: + $ref: '#/definitions/SecretKeySelector' + clientSecretSecret: + $ref: '#/definitions/SecretKeySelector' + endpointParams: + items: + $ref: '#/definitions/OAuth2EndpointParam' + type: array + scopes: + items: + type: string + type: array + tokenURLSecret: + $ref: '#/definitions/SecretKeySelector' + type: object + OAuth2EndpointParam: + description: EndpointParam is for requesting optional fields that should be sent + in the oauth request + properties: + key: + description: Name is the header name + type: string + value: + description: Value is the literal value to use for the header + type: string + type: object OSSArtifact: description: OSSArtifact is the location of an Alibaba Cloud OSS artifact properties: @@ -2133,10 +2332,8 @@ definitions: format: int32 type: integer type: object - Object: - description: +kubebuilder:validation:Type=object - type: object ObjectFieldSelector: + description: +structType=atomic properties: apiVersion: description: |- @@ -2178,6 +2375,7 @@ definitions: OwnerReference contains enough information to let you identify an owning object. An owning object must be in the same namespace as the dependent, or be cluster-scoped, so there is no namespace field. + +structType=atomic properties: apiVersion: description: API version of the referent. 
@@ -2187,6 +2385,8 @@ definitions: If true, AND if the owner has the "foregroundDeletion" finalizer, then the owner cannot be deleted from the key-value store until this reference is removed. + See https://kubernetes.io/docs/concepts/architecture/garbage-collection/#foreground-deletion + for how the garbage collector interacts with this field and enforces the foreground deletion. Defaults to false. To set this field, a user needs "delete" permission of the owner, otherwise 422 (Unprocessable Entity) will be returned. @@ -2219,6 +2419,8 @@ definitions: properties: default: $ref: '#/definitions/AnyString' + description: + $ref: '#/definitions/AnyString' enum: description: Enum holds a list of string values to choose from, for the actual value of the parameter @@ -2239,6 +2441,7 @@ definitions: $ref: '#/definitions/ValueFrom' type: object PersistentVolumeAccessMode: + description: +enum type: string PersistentVolumeClaimSpec: description: |- @@ -2247,7 +2450,7 @@ definitions: properties: accessModes: description: |- - AccessModes contains the desired access modes the volume should have. + accessModes contains the desired access modes the volume should have. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1 +optional items: @@ -2255,13 +2458,15 @@ definitions: type: array dataSource: $ref: '#/definitions/TypedLocalObjectReference' + dataSourceRef: + $ref: '#/definitions/TypedLocalObjectReference' resources: $ref: '#/definitions/ResourceRequirements' selector: $ref: '#/definitions/LabelSelector' storageClassName: description: |- - Name of the StorageClass required by the claim. + storageClassName is the name of the StorageClass required by the claim. 
More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1 +optional type: string @@ -2269,7 +2474,7 @@ definitions: $ref: '#/definitions/PersistentVolumeMode' volumeName: description: |- - VolumeName is the binding reference to the PersistentVolume backing this claim. + volumeName is the binding reference to the PersistentVolume backing this claim. +optional type: string type: object @@ -2290,9 +2495,12 @@ definitions: type: object clusterName: description: |- - The name of the cluster which the object belongs to. - This is used to distinguish resources with same name and namespace in different clusters. - This field is not set anywhere right now and apiserver is going to ignore it if set in create or update request. + Deprecated: ClusterName is a legacy field that was always cleared by + the system and never used; it will be removed completely in 1.25. + + The name in the go struct is changed to help clients detect + accidental use. + +optional type: string creationTimestamp: @@ -2338,10 +2546,7 @@ definitions: and may be truncated by the length of the suffix required to make the value unique on the server. - If this field is specified and the generated name exists, the server will - NOT return a 409 - instead, it will either return 201 Created or 500 with Reason - ServerTimeout indicating a unique name could not be found in the time allotted, and the client - should retry (optionally after the time indicated in the Retry-After header). + If this field is specified and the generated name exists, the server will return a 409. Applied only if Name is not specified. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#idempotency @@ -2428,13 +2633,7 @@ definitions: type: string selfLink: description: |- - SelfLink is a URL representing this object. - Populated by the system. - Read-only. 
- - DEPRECATED - Kubernetes will stop propagating this field in 1.20 release and the field is planned - to be removed in 1.21 release. + Deprecated: selfLink is a legacy read-only field that is no longer populated by the system. +optional type: string spec: @@ -2450,12 +2649,12 @@ definitions: properties: claimName: description: |- - ClaimName is the name of a PersistentVolumeClaim in the same namespace as the pod using this volume. + claimName is the name of a PersistentVolumeClaim in the same namespace as the pod using this volume. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims type: string readOnly: description: |- - Will force the ReadOnly setting in VolumeMounts. + readOnly Will force the ReadOnly setting in VolumeMounts. Default false. +optional type: boolean @@ -2463,6 +2662,7 @@ definitions: namespace. type: object PersistentVolumeMode: + description: +enum title: PersistentVolumeMode describes how a volume is intended to be consumed, either Block or Filesystem. type: string @@ -2470,15 +2670,18 @@ definitions: properties: fsType: description: |- - Filesystem type to mount. + fsType is the filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. type: string pdID: - description: ID that identifies Photon Controller persistent disk + description: pdID is the ID that identifies Photon Controller persistent disk type: string title: Represents a Photon Controller persistent disk resource. type: object + Plugin: + description: Plugin is an Object with exactly one key + type: object PodAffinity: properties: preferredDuringSchedulingIgnoredDuringExecution: @@ -2529,7 +2732,7 @@ definitions: namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. 
- null or empty namespaces list and null namespaceSelector means "this pod's namespace" + null or empty namespaces list and null namespaceSelector means "this pod's namespace". +optional items: type: string @@ -2579,6 +2782,7 @@ definitions: description: |- PodFSGroupChangePolicy holds policies that will be used for applying fsGroup to a volume when volume is mounted. + +enum type: string PodSecurityContext: description: |- @@ -2596,6 +2800,7 @@ definitions: 3. The permission bits are OR'd with rw-rw---- If unset, the Kubelet will not modify the ownership and permissions of any volume. + Note that this field cannot be set when spec.os.name is windows. +optional format: int64 type: integer @@ -2608,6 +2813,7 @@ definitions: May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. + Note that this field cannot be set when spec.os.name is windows. +optional format: int64 type: integer @@ -2628,6 +2834,7 @@ definitions: May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. + Note that this field cannot be set when spec.os.name is windows. +optional format: int64 type: integer @@ -2640,6 +2847,7 @@ definitions: A list of groups applied to the first process run in each container, in addition to the container's primary GID. If unspecified, no groups will be added to any container. + Note that this field cannot be set when spec.os.name is windows. +optional items: format: int64 @@ -2649,6 +2857,7 @@ definitions: description: |- Sysctls hold a list of namespaced sysctls used for the pod. Pods with unsupported sysctls (by the container runtime) might fail to launch. + Note that this field cannot be set when spec.os.name is windows. 
+optional items: $ref: '#/definitions/Sysctl' @@ -2662,18 +2871,18 @@ definitions: properties: fsType: description: |- - FSType represents the filesystem type to mount + fSType represents the filesystem type to mount Must be a filesystem type supported by the host operating system. Ex. "ext4", "xfs". Implicitly inferred to be "ext4" if unspecified. type: string readOnly: description: |- - Defaults to false (read/write). ReadOnly here will force + readOnly defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. +optional type: boolean volumeID: - description: VolumeID uniquely identifies a Portworx volume + description: volumeID uniquely identifies a Portworx volume type: string title: PortworxVolumeSource represents a Portworx volume resource. type: object @@ -2704,6 +2913,8 @@ definitions: +optional format: int32 type: integer + grpc: + $ref: '#/definitions/GRPCAction' httpGet: $ref: '#/definitions/HTTPGetAction' initialDelaySeconds: @@ -2739,7 +2950,8 @@ definitions: value overrides the value provided by the pod spec. Value must be non-negative integer. The value zero indicates stop immediately via the kill signal (no opportunity to shut down). - This is an alpha field and requires enabling ProbeTerminationGracePeriod feature gate. + This is a beta field and requires enabling ProbeTerminationGracePeriod feature gate. + Minimum value is 1. spec.terminationGracePeriodSeconds is used if unset. +optional format: int64 type: integer @@ -2753,13 +2965,17 @@ definitions: type: integer type: object ProcMountType: + description: +enum + type: string + Progress: + title: Progress in N/M format. N is number of task complete. M is number of tasks. type: string ProjectedVolumeSource: description: Represents a projected volume source properties: defaultMode: description: |- - Mode bits used to set permissions on created files by default. + defaultMode are the mode bits used to set permissions on created files by default. 
Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Directories within the path are not affected by this setting. @@ -2770,7 +2986,7 @@ definitions: type: integer sources: description: |- - list of volume projections + sources is the list of volume projections +optional items: $ref: '#/definitions/VolumeProjection' @@ -2802,11 +3018,14 @@ definitions: type: string type: object Protocol: + description: +enum title: Protocol defines network protocols supported for things like container ports. type: string PullPolicy: - description: PullPolicy describes a policy for if/when to pull a container image + description: |- + PullPolicy describes a policy for if/when to pull a container image + +enum type: string Quantity: description: |- @@ -2874,36 +3093,36 @@ definitions: properties: group: description: |- - Group to map volume access to + group to map volume access to Default is no group +optional type: string readOnly: description: |- - ReadOnly here will force the Quobyte volume to be mounted with read-only permissions. + readOnly here will force the Quobyte volume to be mounted with read-only permissions. Defaults to false. 
+optional type: boolean registry: description: |- - Registry represents a single or multiple Quobyte Registry services + registry represents a single or multiple Quobyte Registry services specified as a string as host:port pair (multiple entries are separated with commas) which acts as the central registry for volumes type: string tenant: description: |- - Tenant owning the given Quobyte volume in the Backend + tenant owning the given Quobyte volume in the Backend Used with dynamically provisioned Quobyte volumes, value is set by the plugin +optional type: string user: description: |- - User to map volume access to + user to map volume access to Defaults to serivceaccount user +optional type: string volume: - description: Volume is a string that references an already created Quobyte + description: volume is a string that references an already created Quobyte volume by name. type: string title: Represents a Quobyte mount that lasts the lifetime of a pod. @@ -2913,7 +3132,7 @@ definitions: properties: fsType: description: |- - Filesystem type of the volume that you want to mount. + fsType is the filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#rbd @@ -2922,33 +3141,33 @@ definitions: type: string image: description: |- - The rados image name. + image is the rados image name. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it type: string keyring: description: |- - Keyring is the path to key ring for RBDUser. + keyring is the path to key ring for RBDUser. Default is /etc/ceph/keyring. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it +optional type: string monitors: description: |- - A collection of Ceph monitors. + monitors is a collection of Ceph monitors. 
More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it items: type: string type: array pool: description: |- - The rados pool name. + pool is the rados pool name. Default is rbd. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it +optional type: string readOnly: description: |- - ReadOnly here will force the ReadOnly setting in VolumeMounts. + readOnly here will force the ReadOnly setting in VolumeMounts. Defaults to false. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it +optional @@ -2957,7 +3176,7 @@ definitions: $ref: '#/definitions/LocalObjectReference' user: description: |- - The rados user name. + user is the rados user name. Default is admin. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it +optional @@ -2973,8 +3192,9 @@ definitions: type: string type: object ResourceFieldSelector: - description: ResourceFieldSelector represents container resources (cpu, memory) - and their output format + description: |- + ResourceFieldSelector represents container resources (cpu, memory) and their output format + +structType=atomic properties: containerName: description: |- @@ -3026,6 +3246,8 @@ definitions: manifest: description: Manifest contains the kubernetes manifest type: string + manifestFrom: + $ref: '#/definitions/ManifestFrom' mergeStrategy: description: |- MergeStrategy is the strategy used to merge a patch. It defaults to "strategic" @@ -3155,23 +3377,23 @@ definitions: properties: fsType: description: |- - Filesystem type to mount. + fsType is the filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. "ext4", "xfs", "ntfs". Default is "xfs". +optional type: string gateway: - description: The host address of the ScaleIO API Gateway. + description: gateway is the host address of the ScaleIO API Gateway. type: string protectionDomain: description: |- - The name of the ScaleIO Protection Domain for the configured storage. 
+ protectionDomain is the name of the ScaleIO Protection Domain for the configured storage. +optional type: string readOnly: description: |- - Defaults to false (read/write). ReadOnly here will force + readOnly Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. +optional type: boolean @@ -3179,26 +3401,26 @@ definitions: $ref: '#/definitions/LocalObjectReference' sslEnabled: description: |- - Flag to enable/disable SSL communication with Gateway, default false + sslEnabled Flag enable/disable SSL communication with Gateway, default false +optional type: boolean storageMode: description: |- - Indicates whether the storage for a volume should be ThickProvisioned or ThinProvisioned. + storageMode indicates whether the storage for a volume should be ThickProvisioned or ThinProvisioned. Default is ThinProvisioned. +optional type: string storagePool: description: |- - The ScaleIO Storage Pool associated with the protection domain. + storagePool is the ScaleIO Storage Pool associated with the protection domain. +optional type: string system: - description: The name of the storage system as configured in ScaleIO. + description: system is the name of the storage system as configured in ScaleIO. type: string volumeName: description: |- - The name of a volume already created in the ScaleIO system + volumeName is the name of a volume already created in the ScaleIO system that is associated with this volume source. type: string type: object @@ -3209,12 +3431,12 @@ definitions: args: description: |- Arguments to the entrypoint. - The docker image's CMD is used if this is not provided. + The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable - cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax - can be escaped with a double $$, ie: $$(VAR_NAME). 
Escaped references will never be expanded, - regardless of whether the variable exists or not. - Cannot be updated. + cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced + to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will + produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless + of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell +optional items: @@ -3223,12 +3445,12 @@ definitions: command: description: |- Entrypoint array. Not executed within a shell. - The docker image's ENTRYPOINT is used if this is not provided. + The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable - cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax - can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, - regardless of whether the variable exists or not. - Cannot be updated. + cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced + to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will + produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless + of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell +optional items: @@ -3258,7 +3480,7 @@ definitions: type: array image: description: |- - Docker image name. + Container image name. 
More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. @@ -3388,6 +3610,7 @@ definitions: title: SeccompProfile defines a pod/container's seccomp profile settings. type: object SeccompProfileType: + description: +enum title: SeccompProfileType defines the supported seccomp profile types. type: string SecretEnvSource: @@ -3412,6 +3635,7 @@ definitions: variables with. type: object SecretKeySelector: + description: +structType=atomic properties: key: description: The key of the secret to select from. Must be a valid secret @@ -3440,7 +3664,7 @@ definitions: properties: items: description: |- - If unspecified, each key-value pair in the Data field of the referenced + items if unspecified, each key-value pair in the Data field of the referenced Secret will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be @@ -3460,7 +3684,7 @@ definitions: type: string optional: description: |- - Specify whether the Secret or its key must be defined + optional field specify whether the Secret or its key must be defined +optional type: boolean title: Adapts a secret into a projected volume. @@ -3473,7 +3697,7 @@ definitions: properties: defaultMode: description: |- - Optional: mode bits used to set permissions on created files by default. + defaultMode is Optional: mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Defaults to 0644. 
@@ -3485,7 +3709,7 @@ definitions: type: integer items: description: |- - If unspecified, each key-value pair in the Data field of the referenced + items If unspecified, each key-value pair in the Data field of the referenced Secret will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be @@ -3498,12 +3722,12 @@ definitions: type: array optional: description: |- - Specify whether the Secret or its keys must be defined + optional field specify whether the Secret or its keys must be defined +optional type: boolean secretName: description: |- - Name of the secret in the pod's namespace to use. + secretName is the name of the secret in the pod's namespace to use. More info: https://kubernetes.io/docs/concepts/storage/volumes#secret +optional type: string @@ -3522,6 +3746,7 @@ definitions: AllowPrivilegeEscalation is true always when the container is: 1) run as Privileged 2) has CAP_SYS_ADMIN + Note that this field cannot be set when spec.os.name is windows. +optional type: boolean capabilities: @@ -3531,6 +3756,7 @@ definitions: Run container in privileged mode. Processes in privileged containers are essentially equivalent to root on the host. Defaults to false. + Note that this field cannot be set when spec.os.name is windows. +optional type: boolean procMount: @@ -3539,6 +3765,7 @@ definitions: description: |- Whether this container has a read-only root filesystem. Default is false. + Note that this field cannot be set when spec.os.name is windows. +optional type: boolean runAsGroup: @@ -3547,6 +3774,7 @@ definitions: Uses runtime default if unset. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. + Note that this field cannot be set when spec.os.name is windows. 
+optional format: int64 type: integer @@ -3566,6 +3794,7 @@ definitions: Defaults to user specified in image metadata if unspecified. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. + Note that this field cannot be set when spec.os.name is windows. +optional format: int64 type: integer @@ -3606,7 +3835,7 @@ definitions: properties: audience: description: |- - Audience is the intended audience of the token. A recipient of a token + audience is the intended audience of the token. A recipient of a token must identify itself with an identifier specified in the audience of the token, and otherwise should reject the token. The audience defaults to the identifier of the apiserver. @@ -3614,7 +3843,7 @@ definitions: type: string expirationSeconds: description: |- - ExpirationSeconds is the requested duration of validity of the service + expirationSeconds is the requested duration of validity of the service account token. As the token approaches expiration, the kubelet volume plugin will proactively rotate the service account token. The kubelet will start trying to rotate the token if the token is older than 80 percent of @@ -3625,7 +3854,7 @@ definitions: type: integer path: description: |- - Path is the path relative to the mount point of the file to project the + path is the path relative to the mount point of the file to project the token into. type: string type: object @@ -3636,14 +3865,14 @@ definitions: properties: fsType: description: |- - Filesystem type to mount. + fsType is the filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. +optional type: string readOnly: description: |- - Defaults to false (read/write). ReadOnly here will force + readOnly defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. 
+optional type: boolean @@ -3651,12 +3880,12 @@ definitions: $ref: '#/definitions/LocalObjectReference' volumeName: description: |- - VolumeName is the human-readable name of the StorageOS volume. Volume + volumeName is the human-readable name of the StorageOS volume. Volume names are only unique within a namespace. type: string volumeNamespace: description: |- - VolumeNamespace specifies the scope of the volume within StorageOS. If no + volumeNamespace specifies the scope of the volume within StorageOS. If no namespace is specified then the Pod's namespace will be used. This allows the Kubernetes name scoping to be mirrored within StorageOS for tighter integration. Set VolumeName to any name to override the default behaviour. @@ -3709,6 +3938,7 @@ definitions: $ref: '#/definitions/IntOrString' type: object TaintEffect: + description: +enum type: string TarStrategy: description: TarStrategy will tar and gzip the file or directory when saving @@ -3799,7 +4029,7 @@ definitions: format: int64 type: integer plugin: - $ref: '#/definitions/Object' + $ref: '#/definitions/Plugin' podSpecPatch: description: |- PodSpecPatch holds strategic merge patch to apply against the pod spec. Allows parameterization of @@ -3850,7 +4080,7 @@ definitions: $ref: '#/definitions/Synchronization' timeout: description: |- - Timout allows to set the total node execution timeout duration counting from the node's start time. + Timeout allows to set the total node execution timeout duration counting from the node's start time. This duration also includes time in which the node spends in Pending state. This duration may not be applied to Step or DAG templates. type: string tolerations: @@ -3885,6 +4115,7 @@ definitions: title: TemplateRef is a reference of template resource. type: object TerminationMessagePolicy: + description: +enum title: TerminationMessagePolicy describes how termination messages are retrieved from a container. 
type: string @@ -3930,6 +4161,7 @@ definitions: type: string type: object TolerationOperator: + description: +enum title: A toleration operator is the set of operators that can be used in a toleration. type: string Transformation: @@ -3950,6 +4182,7 @@ definitions: description: |- TypedLocalObjectReference contains enough information to let you locate the typed referenced object inside the same namespace. + +structType=atomic properties: apiGroup: description: |- @@ -3972,20 +4205,21 @@ definitions: intent and helps make sure that UIDs and names do not get conflated. type: string URIScheme: - description: URIScheme identifies the scheme used for connection to a host for - Get actions + description: |- + URIScheme identifies the scheme used for connection to a host for Get actions + +enum type: string UserContainer: properties: args: description: |- Arguments to the entrypoint. - The docker image's CMD is used if this is not provided. + The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable - cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax - can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, - regardless of whether the variable exists or not. - Cannot be updated. + cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced + to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will + produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless + of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell +optional items: @@ -3994,12 +4228,12 @@ definitions: command: description: |- Entrypoint array. Not executed within a shell. 
- The docker image's ENTRYPOINT is used if this is not provided. + The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable - cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax - can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, - regardless of whether the variable exists or not. - Cannot be updated. + cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced + to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will + produce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless + of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell +optional items: @@ -4029,7 +4263,7 @@ definitions: type: array image: description: |- - Docker image name. + Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. @@ -4221,7 +4455,7 @@ definitions: $ref: '#/definitions/ISCSIVolumeSource' name: description: |- - Volume's name. + name of the volume. Must be a DNS_LABEL and unique within the pod. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names type: string @@ -4312,23 +4546,23 @@ definitions: properties: fsType: description: |- - Filesystem type to mount. + fsType is filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. 
+optional type: string storagePolicyID: description: |- - Storage Policy Based Management (SPBM) profile ID associated with the StoragePolicyName. + storagePolicyID is the storage Policy Based Management (SPBM) profile ID associated with the StoragePolicyName. +optional type: string storagePolicyName: description: |- - Storage Policy Based Management (SPBM) profile name. + storagePolicyName is the storage Policy Based Management (SPBM) profile name. +optional type: string volumePath: - description: Path that identifies vSphere volume vmdk + description: volumePath is the path that identifies vSphere volume vmdk type: string title: Represents a vSphere volume resource. type: object @@ -4359,6 +4593,17 @@ definitions: GMSACredentialSpecName is the name of the GMSA credential spec to use. +optional type: string + hostProcess: + description: |- + HostProcess determines if a container should be run as a 'Host Process' container. + This field is alpha-level and will only be honored by components that enable the + WindowsHostProcessContainers feature flag. Setting this field without the feature + flag will result in errors when validating the Pod. All of a Pod's containers must + have the same effective HostProcess value (it is not allowed to have a mix of HostProcess + containers and non-HostProcess containers). In addition, if HostProcess is true + then HostNetwork must also be set to true. + +optional + type: boolean runAsUserName: description: |- The UserName in Windows to run the entrypoint of the container process. 
diff --git a/sdks/java/Makefile b/sdks/java/Makefile index 6070c5afe4af..15465d16cb65 100644 --- a/sdks/java/Makefile +++ b/sdks/java/Makefile @@ -32,6 +32,7 @@ generate: -g java \ -o /wd \ -p hideGenerationTimestamp=true \ + -p serializationLibrary=jsonb \ -p dateLibrary=java8 \ --api-package io.argoproj.workflow.apis \ --invoker-package io.argoproj.workflow \ @@ -39,6 +40,7 @@ generate: --skip-validate-spec \ --group-id io.argoproj.workflow \ --artifact-id argo-client-java \ + --artifact-version $(VERSION) \ --import-mappings Time=java.time.Instant \ --import-mappings Affinity=io.kubernetes.client.openapi.models.V1Affinity \ --import-mappings ConfigMapKeySelector=io.kubernetes.client.openapi.models.V1ConfigMapKeySelector \ @@ -67,6 +69,15 @@ generate: --generate-alias-as-model # https://vsupalov.com/docker-shared-permissions/#set-the-docker-user-when-running-your-container $(CHOWN) $(WD) || sudo $(CHOWN) $(WD) - # replace the generated pom.xml, because that has too many dependencies - sed 's/0.0.0-VERSION/$(VERSION)/' pom.xml > $(WD)/pom.xml + # adding kubernetes-client + cd client && sed 's/<dependencies>/<dependencies><dependency><groupId>io.kubernetes<\/groupId><artifactId>client-java<\/artifactId><version>14.0.1<\/version><\/dependency>/g' pom.xml > tmp && mv tmp pom.xml +client/pom.xml: generate + +install: client/pom.xml + mvn -f client install + +test: + ../../hack/access-token.sh init + kubectl delete wf --all + env ARGO_TOKEN="`../../hack/access-token.sh get`" mvn -f tests test \ No newline at end of file diff --git a/sdks/java/README.md b/sdks/java/README.md index 33b1c4c3f145..c0b95c79d2e9 100644 --- a/sdks/java/README.md +++ b/sdks/java/README.md @@ -7,8 +7,8 @@ This provides model and APIs for accessing the Argo Server API rather. If you wish to access the Kubernetes APIs, you can use the models to do this. You'll need to write your own code to speak to the API. -⚠️ The Java SDK is published to Github packages, not Maven Central. 
You must update your Maven settings.xml -file: [how to do that](https://github.com/argoproj/argo-workflows/packages). +⚠️ The Java SDK is published to GitHub Packages, not Maven Central. You must update your Maven `settings.xml` +file: [how to do that](https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry). Recommended: @@ -16,7 +16,7 @@ Recommended: io.argoproj.workflow argo-client-java - 3.3.0 + v3.3.8 ``` diff --git a/sdks/java/client/docs/ArchivedWorkflowServiceApi.md b/sdks/java/client/docs/ArchivedWorkflowServiceApi.md index 8159916cebb2..a4c87d1a057b 100644 --- a/sdks/java/client/docs/ArchivedWorkflowServiceApi.md +++ b/sdks/java/client/docs/ArchivedWorkflowServiceApi.md @@ -25,6 +25,7 @@ Method | HTTP request | Description import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.ArchivedWorkflowServiceApi; @@ -32,6 +33,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); ArchivedWorkflowServiceApi apiInstance = new ArchivedWorkflowServiceApi(defaultClient); String uid = "uid_example"; // String | @@ -61,7 +68,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -86,6 +93,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.ArchivedWorkflowServiceApi; @@ -93,6 +101,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); ArchivedWorkflowServiceApi apiInstance = new ArchivedWorkflowServiceApi(defaultClient); String uid = "uid_example"; // String | @@ -122,7 +136,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -147,6 +161,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.ArchivedWorkflowServiceApi; @@ -154,6 +169,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); ArchivedWorkflowServiceApi apiInstance = new ArchivedWorkflowServiceApi(defaultClient); try { @@ -179,7 +200,7 @@ This endpoint does not need any parameter. 
### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -204,6 +225,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.ArchivedWorkflowServiceApi; @@ -211,6 +233,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); ArchivedWorkflowServiceApi apiInstance = new ArchivedWorkflowServiceApi(defaultClient); String listOptionsLabelSelector = "listOptionsLabelSelector_example"; // String | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. 
@@ -256,7 +284,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -281,6 +309,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.ArchivedWorkflowServiceApi; @@ -288,6 +317,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); ArchivedWorkflowServiceApi apiInstance = new ArchivedWorkflowServiceApi(defaultClient); String listOptionsLabelSelector = "listOptionsLabelSelector_example"; // String | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. 
@@ -335,7 +370,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -360,6 +395,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.ArchivedWorkflowServiceApi; @@ -367,6 +403,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); ArchivedWorkflowServiceApi apiInstance = new ArchivedWorkflowServiceApi(defaultClient); String uid = "uid_example"; // String | @@ -398,7 +440,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -423,6 +465,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.ArchivedWorkflowServiceApi; @@ -430,6 +473,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the 
following line to set a prefix for the API key, e.g. "Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); ArchivedWorkflowServiceApi apiInstance = new ArchivedWorkflowServiceApi(defaultClient); String uid = "uid_example"; // String | @@ -461,7 +510,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/java/client/docs/ArtifactServiceApi.md b/sdks/java/client/docs/ArtifactServiceApi.md index a3f9f6b8d62c..7cb4822de23e 100644 --- a/sdks/java/client/docs/ArtifactServiceApi.md +++ b/sdks/java/client/docs/ArtifactServiceApi.md @@ -4,15 +4,94 @@ All URIs are relative to *http://localhost:2746* Method | HTTP request | Description ------------- | ------------- | ------------- -[**artifactServiceGetInputArtifact**](ArtifactServiceApi.md#artifactServiceGetInputArtifact) | **GET** /input-artifacts/{namespace}/{name}/{podName}/{artifactName} | Get an input artifact. -[**artifactServiceGetInputArtifactByUID**](ArtifactServiceApi.md#artifactServiceGetInputArtifactByUID) | **GET** /input-artifacts-by-uid/{uid}/{podName}/{artifactName} | Get an input artifact by UID. -[**artifactServiceGetOutputArtifact**](ArtifactServiceApi.md#artifactServiceGetOutputArtifact) | **GET** /artifacts/{namespace}/{name}/{podName}/{artifactName} | Get an output artifact. -[**artifactServiceGetOutputArtifactByUID**](ArtifactServiceApi.md#artifactServiceGetOutputArtifactByUID) | **GET** /artifacts-by-uid/{uid}/{podName}/{artifactName} | Get an output artifact by UID. +[**artifactServiceGetArtifactFile**](ArtifactServiceApi.md#artifactServiceGetArtifactFile) | **GET** /artifact-files/{namespace}/{idDiscriminator}/{id}/{nodeId}/{artifactDiscriminator}/{artifactName} | Get an artifact. +[**artifactServiceGetInputArtifact**](ArtifactServiceApi.md#artifactServiceGetInputArtifact) | **GET** /input-artifacts/{namespace}/{name}/{nodeId}/{artifactName} | Get an input artifact. 
+[**artifactServiceGetInputArtifactByUID**](ArtifactServiceApi.md#artifactServiceGetInputArtifactByUID) | **GET** /input-artifacts-by-uid/{uid}/{nodeId}/{artifactName} | Get an input artifact by UID. +[**artifactServiceGetOutputArtifact**](ArtifactServiceApi.md#artifactServiceGetOutputArtifact) | **GET** /artifacts/{namespace}/{name}/{nodeId}/{artifactName} | Get an output artifact. +[**artifactServiceGetOutputArtifactByUID**](ArtifactServiceApi.md#artifactServiceGetOutputArtifactByUID) | **GET** /artifacts-by-uid/{uid}/{nodeId}/{artifactName} | Get an output artifact by UID. + +# **artifactServiceGetArtifactFile** +> File artifactServiceGetArtifactFile(namespace, idDiscriminator, id, nodeId, artifactName, artifactDiscriminator) + +Get an artifact. + +### Example +```java +// Import classes: +import io.argoproj.workflow.ApiClient; +import io.argoproj.workflow.ApiException; +import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; +import io.argoproj.workflow.models.*; +import io.argoproj.workflow.apis.ArtifactServiceApi; + +public class Example { + public static void main(String[] args) { + ApiClient defaultClient = Configuration.getDefaultApiClient(); + defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); + + ArtifactServiceApi apiInstance = new ArtifactServiceApi(defaultClient); + String namespace = "namespace_example"; // String | + String idDiscriminator = "idDiscriminator_example"; // String | + String id = "id_example"; // String | + String nodeId = "nodeId_example"; // String | + String artifactName = "artifactName_example"; // String | + String artifactDiscriminator = "artifactDiscriminator_example"; // String | + try { + File result = apiInstance.artifactServiceGetArtifactFile(namespace, idDiscriminator, id, nodeId, artifactName, artifactDiscriminator); + System.out.println(result); + } catch (ApiException e) { + System.err.println("Exception when calling ArtifactServiceApi#artifactServiceGetArtifactFile"); + System.err.println("Status code: " + e.getCode()); + System.err.println("Reason: " + e.getResponseBody()); + System.err.println("Response headers: " + e.getResponseHeaders()); + e.printStackTrace(); + } + } +} +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **namespace** | **String**| | + **idDiscriminator** | **String**| | [enum: workflow, archived-workflows ] + **id** | **String**| | + **nodeId** | **String**| | + **artifactName** | **String**| | + **artifactDiscriminator** | **String**| | [enum: outputs] + +### Return type + +[**File**](File.md) + +### Authorization + +[BearerToken](../README.md#BearerToken) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | An artifact file. | - | +**0** | An unexpected error response. 
| - | + # **artifactServiceGetInputArtifact** -> artifactServiceGetInputArtifact(namespace, name, podName, artifactName) +> File artifactServiceGetInputArtifact(namespace, name, nodeId, artifactName) Get an input artifact. @@ -22,6 +101,7 @@ Get an input artifact. import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.ArtifactServiceApi; @@ -29,14 +109,21 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); ArtifactServiceApi apiInstance = new ArtifactServiceApi(defaultClient); String namespace = "namespace_example"; // String | String name = "name_example"; // String | - String podName = "podName_example"; // String | + String nodeId = "nodeId_example"; // String | String artifactName = "artifactName_example"; // String | try { - apiInstance.artifactServiceGetInputArtifact(namespace, name, podName, artifactName); + File result = apiInstance.artifactServiceGetInputArtifact(namespace, name, nodeId, artifactName); + System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling ArtifactServiceApi#artifactServiceGetInputArtifact"); System.err.println("Status code: " + e.getCode()); @@ -54,16 +141,16 @@ Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **namespace** | **String**| | **name** | **String**| | - **podName** | **String**| | + **nodeId** | **String**| | 
**artifactName** | **String**| | ### Return type -null (empty response body) +[**File**](File.md) ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -78,7 +165,7 @@ No authorization required # **artifactServiceGetInputArtifactByUID** -> artifactServiceGetInputArtifactByUID(namespace, uid, podName, artifactName) +> File artifactServiceGetInputArtifactByUID(uid, nodeId, artifactName) Get an input artifact by UID. @@ -88,6 +175,7 @@ Get an input artifact by UID. import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.ArtifactServiceApi; @@ -95,14 +183,20 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); ArtifactServiceApi apiInstance = new ArtifactServiceApi(defaultClient); - String namespace = "namespace_example"; // String | String uid = "uid_example"; // String | - String podName = "podName_example"; // String | + String nodeId = "nodeId_example"; // String | String artifactName = "artifactName_example"; // String | try { - apiInstance.artifactServiceGetInputArtifactByUID(namespace, uid, podName, artifactName); + File result = apiInstance.artifactServiceGetInputArtifactByUID(uid, nodeId, artifactName); + System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling ArtifactServiceApi#artifactServiceGetInputArtifactByUID"); System.err.println("Status code: " + e.getCode()); @@ -118,18 +212,17 @@ public class Example { Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **String**| | **uid** | **String**| | - **podName** | **String**| | + **nodeId** | **String**| | **artifactName** | **String**| | ### Return type -null (empty response body) +[**File**](File.md) ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -144,7 +237,7 @@ No authorization required # **artifactServiceGetOutputArtifact** -> artifactServiceGetOutputArtifact(namespace, name, podName, artifactName) +> File artifactServiceGetOutputArtifact(namespace, name, nodeId, artifactName) Get an output artifact. @@ -154,6 +247,7 @@ Get an output artifact. 
import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.ArtifactServiceApi; @@ -161,14 +255,21 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); ArtifactServiceApi apiInstance = new ArtifactServiceApi(defaultClient); String namespace = "namespace_example"; // String | String name = "name_example"; // String | - String podName = "podName_example"; // String | + String nodeId = "nodeId_example"; // String | String artifactName = "artifactName_example"; // String | try { - apiInstance.artifactServiceGetOutputArtifact(namespace, name, podName, artifactName); + File result = apiInstance.artifactServiceGetOutputArtifact(namespace, name, nodeId, artifactName); + System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling ArtifactServiceApi#artifactServiceGetOutputArtifact"); System.err.println("Status code: " + e.getCode()); @@ -186,16 +287,16 @@ Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **namespace** | **String**| | **name** | **String**| | - **podName** | **String**| | + **nodeId** | **String**| | **artifactName** | **String**| | ### Return type -null (empty response body) +[**File**](File.md) ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -210,7 +311,7 @@ No authorization required # 
**artifactServiceGetOutputArtifactByUID** -> artifactServiceGetOutputArtifactByUID(uid, podName, artifactName) +> File artifactServiceGetOutputArtifactByUID(uid, nodeId, artifactName) Get an output artifact by UID. @@ -220,6 +321,7 @@ Get an output artifact by UID. import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.ArtifactServiceApi; @@ -227,13 +329,20 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); ArtifactServiceApi apiInstance = new ArtifactServiceApi(defaultClient); String uid = "uid_example"; // String | - String podName = "podName_example"; // String | + String nodeId = "nodeId_example"; // String | String artifactName = "artifactName_example"; // String | try { - apiInstance.artifactServiceGetOutputArtifactByUID(uid, podName, artifactName); + File result = apiInstance.artifactServiceGetOutputArtifactByUID(uid, nodeId, artifactName); + System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling ArtifactServiceApi#artifactServiceGetOutputArtifactByUID"); System.err.println("Status code: " + e.getCode()); @@ -250,16 +359,16 @@ public class Example { Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **uid** | **String**| | - **podName** | **String**| | + **nodeId** | **String**| | **artifactName** | **String**| | ### Return type -null (empty response 
body) +[**File**](File.md) ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/java/client/docs/ClusterWorkflowTemplateServiceApi.md b/sdks/java/client/docs/ClusterWorkflowTemplateServiceApi.md index f77c8ea49c18..ba5d566f3623 100644 --- a/sdks/java/client/docs/ClusterWorkflowTemplateServiceApi.md +++ b/sdks/java/client/docs/ClusterWorkflowTemplateServiceApi.md @@ -24,6 +24,7 @@ Method | HTTP request | Description import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.ClusterWorkflowTemplateServiceApi; @@ -31,6 +32,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); ClusterWorkflowTemplateServiceApi apiInstance = new ClusterWorkflowTemplateServiceApi(defaultClient); IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest body = new IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest(); // IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest | @@ -60,7 +67,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -85,6 +92,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.ClusterWorkflowTemplateServiceApi; @@ -92,6 +100,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); ClusterWorkflowTemplateServiceApi apiInstance = new ClusterWorkflowTemplateServiceApi(defaultClient); String name = "name_example"; // String | @@ -133,7 +147,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -158,6 +172,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.ClusterWorkflowTemplateServiceApi; @@ -165,6 +180,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); ClusterWorkflowTemplateServiceApi apiInstance = new ClusterWorkflowTemplateServiceApi(defaultClient); String name = "name_example"; // String | @@ -196,7 +217,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -221,6 +242,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.ClusterWorkflowTemplateServiceApi; @@ -228,6 +250,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); ClusterWorkflowTemplateServiceApi apiInstance = new ClusterWorkflowTemplateServiceApi(defaultClient); IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest body = new IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest(); // IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest | @@ -257,7 +285,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -282,6 +310,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.ClusterWorkflowTemplateServiceApi; @@ -289,6 +318,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); ClusterWorkflowTemplateServiceApi apiInstance = new ClusterWorkflowTemplateServiceApi(defaultClient); String listOptionsLabelSelector = "listOptionsLabelSelector_example"; // String | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. 
@@ -334,7 +369,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -359,6 +394,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.ClusterWorkflowTemplateServiceApi; @@ -366,6 +402,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); ClusterWorkflowTemplateServiceApi apiInstance = new ClusterWorkflowTemplateServiceApi(defaultClient); String name = "name_example"; // String | DEPRECATED: This field is ignored. @@ -397,7 +439,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/java/client/docs/Condition.md b/sdks/java/client/docs/Condition.md deleted file mode 100644 index 88428fc963ba..000000000000 --- a/sdks/java/client/docs/Condition.md +++ /dev/null @@ -1,19 +0,0 @@ - - -# Condition - -Condition contains details for one aspect of the current state of this API Resource. - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**lastTransitionTime** | **java.time.Instant** | | -**message** | **String** | message is a human readable message indicating details about the transition. This may be an empty string. 
| -**observedGeneration** | **Integer** | observedGeneration represents the .metadata.generation that the condition was set based upon. For instance, if .metadata.generation is currently 12, but the .status.conditions[x].observedGeneration is 9, the condition is out of date with respect to the current state of the instance. | [optional] -**reason** | **String** | reason contains a programmatic identifier indicating the reason for the condition's last transition. Producers of specific condition types may define expected values and meanings for this field, and whether the values are considered a guaranteed API. The value should be a CamelCase string. This field may not be empty. | -**status** | **String** | status of the condition, one of True, False, Unknown. | -**type** | **String** | type of condition in CamelCase or in foo.example.com/CamelCase. | - - - diff --git a/sdks/java/client/docs/CronWorkflowServiceApi.md b/sdks/java/client/docs/CronWorkflowServiceApi.md index 59fb9a3c576a..9607ebbc7a90 100644 --- a/sdks/java/client/docs/CronWorkflowServiceApi.md +++ b/sdks/java/client/docs/CronWorkflowServiceApi.md @@ -26,6 +26,7 @@ Method | HTTP request | Description import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.CronWorkflowServiceApi; @@ -33,6 +34,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); CronWorkflowServiceApi apiInstance = new CronWorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -64,7 +71,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -89,6 +96,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.CronWorkflowServiceApi; @@ -96,6 +104,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); CronWorkflowServiceApi apiInstance = new CronWorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -139,7 +153,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -164,6 +178,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.CronWorkflowServiceApi; @@ -171,6 +186,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); CronWorkflowServiceApi apiInstance = new CronWorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -204,7 +225,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -229,6 +250,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.CronWorkflowServiceApi; @@ -236,6 +258,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); CronWorkflowServiceApi apiInstance = new CronWorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -267,7 +295,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -292,6 +320,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.CronWorkflowServiceApi; @@ -299,6 +328,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); CronWorkflowServiceApi apiInstance = new CronWorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -346,7 +381,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -371,6 +406,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.CronWorkflowServiceApi; @@ -378,6 +414,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); CronWorkflowServiceApi apiInstance = new CronWorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -411,7 +453,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -436,6 +478,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.CronWorkflowServiceApi; @@ -443,6 +486,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); CronWorkflowServiceApi apiInstance = new CronWorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -476,7 +525,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -501,6 +550,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.CronWorkflowServiceApi; @@ -508,6 +558,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); CronWorkflowServiceApi apiInstance = new CronWorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -541,7 +597,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/java/client/docs/Duration.md b/sdks/java/client/docs/Duration.md deleted file mode 100644 index e6f5fc5176a5..000000000000 --- a/sdks/java/client/docs/Duration.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# Duration - -Duration is a wrapper around time.Duration which supports correct marshaling to YAML and JSON. In particular, it marshals into strings, which can be used as map keys in json. 
- -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**duration** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/EventServiceApi.md b/sdks/java/client/docs/EventServiceApi.md index 4a7202005a26..f734d99d5ee5 100644 --- a/sdks/java/client/docs/EventServiceApi.md +++ b/sdks/java/client/docs/EventServiceApi.md @@ -20,6 +20,7 @@ Method | HTTP request | Description import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.EventServiceApi; @@ -27,6 +28,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); EventServiceApi apiInstance = new EventServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -74,7 +81,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -99,6 +106,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.EventServiceApi; @@ -106,6 +114,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); EventServiceApi apiInstance = new EventServiceApi(defaultClient); String namespace = "namespace_example"; // String | The namespace for the io.argoproj.workflow.v1alpha1. This can be empty if the client has cluster scoped permissions. If empty, then the event is \"broadcast\" to workflow event binding in all namespaces. 
@@ -139,7 +153,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/java/client/docs/EventSourceServiceApi.md b/sdks/java/client/docs/EventSourceServiceApi.md index 88e1d7fc3162..eb4b1557d3fa 100644 --- a/sdks/java/client/docs/EventSourceServiceApi.md +++ b/sdks/java/client/docs/EventSourceServiceApi.md @@ -25,6 +25,7 @@ Method | HTTP request | Description import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.EventSourceServiceApi; @@ -32,6 +33,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); EventSourceServiceApi apiInstance = new EventSourceServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -63,7 +70,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -88,6 +95,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.EventSourceServiceApi; @@ -95,6 +103,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); EventSourceServiceApi apiInstance = new EventSourceServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -138,7 +152,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -163,6 +177,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.EventSourceServiceApi; @@ -170,6 +185,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); EventSourceServiceApi apiInstance = new EventSourceServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -227,7 +248,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -252,6 +273,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.EventSourceServiceApi; @@ -259,6 +281,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); EventSourceServiceApi apiInstance = new EventSourceServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -290,7 +318,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -315,6 +343,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.EventSourceServiceApi; @@ -322,6 +351,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); EventSourceServiceApi apiInstance = new EventSourceServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -369,7 +404,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -394,6 +429,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.EventSourceServiceApi; @@ -401,6 +437,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); EventSourceServiceApi apiInstance = new EventSourceServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -434,7 +476,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -459,6 +501,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.EventSourceServiceApi; @@ -466,6 +509,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); EventSourceServiceApi apiInstance = new EventSourceServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -513,7 +562,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials.md deleted file mode 100644 index 158f6181f298..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials.md +++ /dev/null @@ -1,15 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**accessKeyId** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**secretAccessKey** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**sessionToken** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint.md deleted file mode 100644 index 8ad3a951f6fa..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint.md +++ /dev/null @@ -1,13 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**url** | **String** | | [optional] - - - diff --git 
a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md deleted file mode 100644 index 35c221533f88..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md +++ /dev/null @@ -1,13 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**resources** | [**io.kubernetes.client.openapi.models.V1ResourceRequirements**](io.kubernetes.client.openapi.models.V1ResourceRequirements.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource.md deleted file mode 100644 index ea23607a87bb..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource.md +++ /dev/null @@ -1,41 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**awsElasticBlockStore** | [**AWSElasticBlockStoreVolumeSource**](AWSElasticBlockStoreVolumeSource.md) | | [optional] -**azureDisk** | [**AzureDiskVolumeSource**](AzureDiskVolumeSource.md) | | [optional] -**azureFile** | [**AzureFileVolumeSource**](AzureFileVolumeSource.md) | | [optional] -**cephfs** | [**CephFSVolumeSource**](CephFSVolumeSource.md) | | [optional] -**cinder** | [**CinderVolumeSource**](CinderVolumeSource.md) | | [optional] -**configMap** | [**ConfigMapVolumeSource**](ConfigMapVolumeSource.md) | | [optional] -**csi** | [**CSIVolumeSource**](CSIVolumeSource.md) | | [optional] -**downwardAPI** | [**DownwardAPIVolumeSource**](DownwardAPIVolumeSource.md) | | [optional] -**emptyDir** | 
[**EmptyDirVolumeSource**](EmptyDirVolumeSource.md) | | [optional] -**ephemeral** | [**EphemeralVolumeSource**](EphemeralVolumeSource.md) | | [optional] -**fc** | [**FCVolumeSource**](FCVolumeSource.md) | | [optional] -**flexVolume** | [**FlexVolumeSource**](FlexVolumeSource.md) | | [optional] -**flocker** | [**FlockerVolumeSource**](FlockerVolumeSource.md) | | [optional] -**gcePersistentDisk** | [**GCEPersistentDiskVolumeSource**](GCEPersistentDiskVolumeSource.md) | | [optional] -**gitRepo** | [**GitRepoVolumeSource**](GitRepoVolumeSource.md) | | [optional] -**glusterfs** | [**GlusterfsVolumeSource**](GlusterfsVolumeSource.md) | | [optional] -**hostPath** | [**HostPathVolumeSource**](HostPathVolumeSource.md) | | [optional] -**iscsi** | [**ISCSIVolumeSource**](ISCSIVolumeSource.md) | | [optional] -**nfs** | [**NFSVolumeSource**](NFSVolumeSource.md) | | [optional] -**persistentVolumeClaim** | [**PersistentVolumeClaimVolumeSource**](PersistentVolumeClaimVolumeSource.md) | | [optional] -**photonPersistentDisk** | [**PhotonPersistentDiskVolumeSource**](PhotonPersistentDiskVolumeSource.md) | | [optional] -**portworxVolume** | [**PortworxVolumeSource**](PortworxVolumeSource.md) | | [optional] -**projected** | [**ProjectedVolumeSource**](ProjectedVolumeSource.md) | | [optional] -**quobyte** | [**QuobyteVolumeSource**](QuobyteVolumeSource.md) | | [optional] -**rbd** | [**RBDVolumeSource**](RBDVolumeSource.md) | | [optional] -**scaleIO** | [**ScaleIOVolumeSource**](ScaleIOVolumeSource.md) | | [optional] -**secret** | [**SecretVolumeSource**](SecretVolumeSource.md) | | [optional] -**storageos** | [**StorageOSVolumeSource**](StorageOSVolumeSource.md) | | [optional] -**vsphereVolume** | [**VsphereVirtualDiskVolumeSource**](VsphereVirtualDiskVolumeSource.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff.md deleted file mode 100644 index 
262155c15194..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff.md +++ /dev/null @@ -1,17 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**factorPercentage** | **Integer** | | [optional] -**cap** | [**Duration**](Duration.md) | | [optional] -**duration** | [**Duration**](Duration.md) | | [optional] -**jitterPercentage** | **Integer** | | [optional] -**steps** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat.md deleted file mode 100644 index 46b3533e7b2a..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat.md +++ /dev/null @@ -1,13 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**abstractStep** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Code.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Code.md deleted file mode 100644 index 66fd551490d8..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Code.md +++ /dev/null @@ -1,15 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Code - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**image** | **String** | Image is used in preference to Runtime. 
| [optional] -**runtime** | **String** | | [optional] -**source** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Container.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Container.md deleted file mode 100644 index 7ab947cb4076..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Container.md +++ /dev/null @@ -1,19 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Container - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**args** | **List<String>** | | [optional] -**command** | **List<String>** | | [optional] -**env** | [**List<io.kubernetes.client.openapi.models.V1EnvVar>**](io.kubernetes.client.openapi.models.V1EnvVar.md) | | [optional] -**image** | **String** | | [optional] -**in** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface.md) | | [optional] -**resources** | [**io.kubernetes.client.openapi.models.V1ResourceRequirements**](io.kubernetes.client.openapi.models.V1ResourceRequirements.md) | | [optional] -**volumeMounts** | [**List<io.kubernetes.client.openapi.models.V1VolumeMount>**](io.kubernetes.client.openapi.models.V1VolumeMount.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron.md deleted file mode 100644 index 8bf9f0fe811e..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**layout** | **String** | | [optional] -**schedule** | **String** | | [optional] - - - diff --git 
a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource.md deleted file mode 100644 index d85cc484c595..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**value** | **String** | | [optional] -**valueFrom** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom**](GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom.md deleted file mode 100644 index 493de6d2280d..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom.md +++ /dev/null @@ -1,13 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**secretKeyRef** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink.md deleted file mode 100644 index 5fe94ca0bfa3..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**actions** | 
[**List<GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction>**](GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction.md) | | [optional] -**database** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Database**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Database.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource.md deleted file mode 100644 index 6f264caf43c0..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource.md +++ /dev/null @@ -1,18 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**commitInterval** | [**Duration**](Duration.md) | | [optional] -**database** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Database**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Database.md) | | [optional] -**initSchema** | **Boolean** | | [optional] -**offsetColumn** | **String** | | [optional] -**pollInterval** | [**Duration**](Duration.md) | | [optional] -**query** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Database.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Database.md deleted file mode 100644 index 99f9f021ae64..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Database.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Database - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**dataSource** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource.md) | | [optional] -**driver** | **String** | | [optional] - - - diff --git 
a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe.md deleted file mode 100644 index 1dc57b7f9941..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe.md +++ /dev/null @@ -1,15 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**abstractStep** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md) | | [optional] -**maxSize** | **String** | Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: <quantity> ::= <signedNumber><suffix> (Note that <suffix> may be empty, from the \"\" case in <decimalSI>.) <digit> ::= 0 | 1 | ... | 9 <digits> ::= <digit> | <digit><digits> <number> ::= <digits> | <digits>.<digits> | <digits>. | .<digits> <sign> ::= \"+\" | \"-\" <signedNumber> ::= <number> | <sign><number> <suffix> ::= <binarySI> | <decimalExponent> | <decimalSI> <binarySI> ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) <decimalSI> ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) <decimalExponent> ::= \"e\" <signedNumber> | \"E\" <signedNumber> No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. 
When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation. 
| [optional] -**uid** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand.md deleted file mode 100644 index 7da9e6c12e44..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand.md +++ /dev/null @@ -1,13 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**abstractStep** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter.md deleted file mode 100644 index 6c1be3191a0f..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**abstractStep** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md) | | [optional] -**expression** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten.md deleted file mode 100644 index 66b3f7447fdf..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten.md +++ /dev/null @@ -1,13 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**abstractStep** | 
[**GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Git.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Git.md deleted file mode 100644 index 4d39100833b3..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Git.md +++ /dev/null @@ -1,22 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Git - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**branch** | **String** | | [optional] -**command** | **List<String>** | | [optional] -**env** | [**List<io.kubernetes.client.openapi.models.V1EnvVar>**](io.kubernetes.client.openapi.models.V1EnvVar.md) | | [optional] -**image** | **String** | | [optional] -**insecureIgnoreHostKey** | **Boolean** | | [optional] -**passwordSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**path** | **String** | +kubebuilder:default=. 
| [optional] -**sshPrivateKeySecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**url** | **String** | | [optional] -**usernameSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Group.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Group.md deleted file mode 100644 index da8122b14ccd..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Group.md +++ /dev/null @@ -1,16 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Group - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**endOfGroup** | **String** | | [optional] -**format** | **String** | | [optional] -**key** | **String** | | [optional] -**storage** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader.md deleted file mode 100644 index d4f3e11abfbd..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader.md +++ /dev/null @@ -1,15 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**name** | **String** | | [optional] -**value** | **String** | | [optional] -**valueFrom** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource.md) | | [optional] - - - diff --git 
a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource.md deleted file mode 100644 index b58998957972..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource.md +++ /dev/null @@ -1,13 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**secretKeyRef** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink.md deleted file mode 100644 index f1db517c0239..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink.md +++ /dev/null @@ -1,15 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**headers** | [**List<GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader>**](GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader.md) | | [optional] -**insecureSkipVerify** | **Boolean** | | [optional] -**url** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource.md deleted file mode 100644 index 5714cde4bd41..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource.md +++ /dev/null @@ -1,13 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | 
------------- -**serviceName** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface.md deleted file mode 100644 index 806d0dc653fb..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**fifo** | **Boolean** | | [optional] -**http** | **Object** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream.md deleted file mode 100644 index c499d8ed2b20..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream.md +++ /dev/null @@ -1,16 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**auth** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth**](GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth.md) | | [optional] -**name** | **String** | | [optional] -**natsUrl** | **String** | | [optional] -**subject** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink.md deleted file mode 100644 index 8b2f42f8714d..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink.md +++ /dev/null @@ -1,13 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | 
------------- -**jetstream** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream**](GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource.md deleted file mode 100644 index 8a516abb895c..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource.md +++ /dev/null @@ -1,13 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**jetstream** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream**](GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka.md deleted file mode 100644 index a68a0eada8d7..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka.md +++ /dev/null @@ -1,15 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**kafkaConfig** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig**](GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig.md) | | [optional] -**name** | **String** | | [optional] -**topic** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig.md deleted file mode 100644 index 9cebcefbaa75..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig.md +++ /dev/null @@ -1,15 +0,0 @@ - - -# 
GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**brokers** | **List<String>** | | [optional] -**maxMessageBytes** | **Integer** | | [optional] -**net** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET**](GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET.md deleted file mode 100644 index 874f5740a881..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**sasl** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL**](GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL.md) | | [optional] -**tls** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS**](GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink.md deleted file mode 100644 index 7356e54c1dab..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink.md +++ /dev/null @@ -1,21 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**acks** | **String** | | [optional] -**async** | **Boolean** | | [optional] -**batchSize** | **String** | Quantity is a fixed-point representation of a number. 
It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: <quantity> ::= <signedNumber><suffix> (Note that <suffix> may be empty, from the \"\" case in <decimalSI>.) <digit> ::= 0 | 1 | ... | 9 <digits> ::= <digit> | <digit><digits> <number> ::= <digits> | <digits>.<digits> | <digits>. | .<digits> <sign> ::= \"+\" | \"-\" <signedNumber> ::= <number> | <sign><number> <suffix> ::= <binarySI> | <decimalExponent> | <decimalSI> <binarySI> ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) <decimalSI> ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) <decimalExponent> ::= \"e\" <signedNumber> | \"E\" <signedNumber> No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. 
Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation. | [optional] -**compressionType** | **String** | | [optional] -**enableIdempotence** | **Boolean** | | [optional] -**kafka** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka.md) | | [optional] -**linger** | [**Duration**](Duration.md) | | [optional] -**maxInflight** | **Integer** | | [optional] -**messageTimeout** | [**Duration**](Duration.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource.md deleted file mode 100644 index e77b84281127..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource.md +++ /dev/null @@ -1,17 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**fetchMin** | **String** | Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: <quantity> ::= <signedNumber><suffix> (Note that <suffix> may be empty, from the \"\" case in <decimalSI>.) <digit> ::= 0 | 1 | ... | 9 <digits> ::= <digit> | <digit><digits> <number> ::= <digits> | <digits>.<digits> | <digits>. 
| .<digits> <sign> ::= \"+\" | \"-\" <signedNumber> ::= <number> | <sign><number> <suffix> ::= <binarySI> | <decimalExponent> | <decimalSI> <binarySI> ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) <decimalSI> ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) <decimalExponent> ::= \"e\" <signedNumber> | \"E\" <signedNumber> No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation. 
| [optional] -**fetchWaitMax** | [**Duration**](Duration.md) | | [optional] -**groupId** | **String** | GroupID is the consumer group ID. If not specified, a unique deterministic group ID is generated. | [optional] -**kafka** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka.md) | | [optional] -**startOffset** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Log.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Log.md deleted file mode 100644 index 02379d6897a7..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Log.md +++ /dev/null @@ -1,13 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Log - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**truncate** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Map.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Map.md deleted file mode 100644 index 29f71588c205..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Map.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Map - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**abstractStep** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md) | | [optional] -**expression** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata.md deleted file mode 100644 index a3ce4b4c1b0b..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata.md +++ /dev/null @@ 
-1,14 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**annotations** | **Map<String, String>** | | [optional] -**labels** | **Map<String, String>** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth.md deleted file mode 100644 index 33a7e68834d0..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth.md +++ /dev/null @@ -1,13 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**token** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline.md deleted file mode 100644 index 0a135ffc0ee6..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline.md +++ /dev/null @@ -1,15 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**metadata** | [**io.kubernetes.client.openapi.models.V1ObjectMeta**](io.kubernetes.client.openapi.models.V1ObjectMeta.md) | | [optional] -**spec** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec**](GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec.md) | | [optional] -**status** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus**](GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus.md) | | [optional] - - - diff --git 
a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList.md deleted file mode 100644 index 1a9f3486323c..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**items** | [**List<GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline>**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline.md) | | [optional] -**metadata** | [**io.kubernetes.client.openapi.models.V1ListMeta**](io.kubernetes.client.openapi.models.V1ListMeta.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec.md deleted file mode 100644 index e3e3e85b6255..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**deletionDelay** | [**Duration**](Duration.md) | | [optional] -**steps** | [**List<GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec>**](GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus.md deleted file mode 100644 index 5f0c1c3e82e5..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus.md +++ /dev/null @@ -1,16 +0,0 @@ - - -# 
GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**conditions** | [**List<Condition>**](Condition.md) | | [optional] -**lastUpdated** | **java.time.Instant** | | [optional] -**message** | **String** | | [optional] -**phase** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3.md deleted file mode 100644 index 5bc7d3b27b3a..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3.md +++ /dev/null @@ -1,17 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1S3 - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**bucket** | **String** | | [optional] -**credentials** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials.md) | | [optional] -**endpoint** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint.md) | | [optional] -**name** | **String** | | [optional] -**region** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink.md deleted file mode 100644 index 10c2cdebb3cf..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink.md +++ /dev/null @@ -1,13 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**s3** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1S3**](GithubComArgoprojLabsArgoDataflowApiV1alpha1S3.md) | | [optional] - - - diff --git 
a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source.md deleted file mode 100644 index 2fd91a412201..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source.md +++ /dev/null @@ -1,15 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**concurrency** | **Integer** | | [optional] -**pollPeriod** | [**Duration**](Duration.md) | | [optional] -**s3** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1S3**](GithubComArgoprojLabsArgoDataflowApiV1alpha1S3.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL.md deleted file mode 100644 index 6310e942fb57..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL.md +++ /dev/null @@ -1,15 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**mechanism** | **String** | | [optional] -**password** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**user** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction.md deleted file mode 100644 index b9ac0292cd16..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction.md +++ /dev/null @@ -1,15 +0,0 @@ - - -# 
GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**onError** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement**](GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement.md) | | [optional] -**onRecordNotFound** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement**](GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement.md) | | [optional] -**statement** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement**](GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement.md deleted file mode 100644 index 9c1fb496e228..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**args** | **List<String>** | | [optional] -**sql** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN.md deleted file mode 100644 index d71f33ae631c..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN.md +++ /dev/null @@ -1,20 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**auth** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth**](GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth.md) | | [optional] -**clusterId** | **String** | | [optional] -**maxInflight** | **Integer** 
| | [optional] -**name** | **String** | | [optional] -**natsMonitoringUrl** | **String** | | [optional] -**natsUrl** | **String** | | [optional] -**subject** | **String** | | [optional] -**subjectPrefix** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale.md deleted file mode 100644 index 62c3ed959f58..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale.md +++ /dev/null @@ -1,15 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**desiredReplicas** | **String** | An expression to determine the number of replicas. Must evaluation to an `int`. | [optional] -**peekDelay** | **String** | | [optional] -**scalingDelay** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar.md deleted file mode 100644 index feaf010a7432..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar.md +++ /dev/null @@ -1,13 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**resources** | [**io.kubernetes.client.openapi.models.V1ResourceRequirements**](io.kubernetes.client.openapi.models.V1ResourceRequirements.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink.md deleted file mode 100644 index 2aa4238ca428..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink.md +++ /dev/null @@ -1,22 +0,0 @@ - - -# 
GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**db** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink**](GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink.md) | | [optional] -**deadLetterQueue** | **Boolean** | | [optional] -**http** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink**](GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink.md) | | [optional] -**jetstream** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink**](GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink.md) | | [optional] -**kafka** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink**](GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink.md) | | [optional] -**log** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Log**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Log.md) | | [optional] -**name** | **String** | | [optional] -**s3** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink**](GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink.md) | | [optional] -**stan** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN**](GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN.md) | | [optional] -**volume** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink**](GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Source.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Source.md deleted file mode 100644 index d4018d6030ce..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Source.md +++ /dev/null @@ -1,22 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Source - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**cron** | 
[**GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron.md) | | [optional] -**db** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource.md) | | [optional] -**http** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource.md) | | [optional] -**jetstream** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource.md) | | [optional] -**kafka** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource.md) | | [optional] -**name** | **String** | | [optional] -**retry** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff.md) | | [optional] -**s3** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source**](GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source.md) | | [optional] -**stan** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN**](GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN.md) | | [optional] -**volume** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Step.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Step.md deleted file mode 100644 index f61dac8901cd..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Step.md +++ /dev/null @@ -1,15 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Step - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**metadata** | [**io.kubernetes.client.openapi.models.V1ObjectMeta**](io.kubernetes.client.openapi.models.V1ObjectMeta.md) | | [optional] -**spec** | 
[**GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec**](GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec.md) | | [optional] -**status** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus**](GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec.md deleted file mode 100644 index bf173af4a27e..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec.md +++ /dev/null @@ -1,37 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**affinity** | [**io.kubernetes.client.openapi.models.V1Affinity**](io.kubernetes.client.openapi.models.V1Affinity.md) | | [optional] -**cat** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat.md) | | [optional] -**code** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Code**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Code.md) | | [optional] -**container** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Container**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Container.md) | | [optional] -**dedupe** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe.md) | | [optional] -**expand** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand.md) | | [optional] -**filter** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter.md) | | [optional] -**flatten** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten.md) | | [optional] -**git** | 
[**GithubComArgoprojLabsArgoDataflowApiV1alpha1Git**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Git.md) | | [optional] -**group** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Group**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Group.md) | | [optional] -**imagePullSecrets** | [**List<io.kubernetes.client.openapi.models.V1LocalObjectReference>**](io.kubernetes.client.openapi.models.V1LocalObjectReference.md) | | [optional] -**map** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Map**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Map.md) | | [optional] -**metadata** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata.md) | | [optional] -**name** | **String** | | [optional] -**nodeSelector** | **Map<String, String>** | | [optional] -**replicas** | **Integer** | | [optional] -**restartPolicy** | **String** | | [optional] -**scale** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale.md) | | [optional] -**serviceAccountName** | **String** | | [optional] -**sidecar** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar.md) | | [optional] -**sinks** | [**List<GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink>**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink.md) | | [optional] -**sources** | [**List<GithubComArgoprojLabsArgoDataflowApiV1alpha1Source>**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Source.md) | | [optional] -**terminator** | **Boolean** | | [optional] -**tolerations** | [**List<io.kubernetes.client.openapi.models.V1Toleration>**](io.kubernetes.client.openapi.models.V1Toleration.md) | | [optional] -**volumes** | [**List<io.kubernetes.client.openapi.models.V1Volume>**](io.kubernetes.client.openapi.models.V1Volume.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus.md 
b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus.md deleted file mode 100644 index d26500e121bc..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus.md +++ /dev/null @@ -1,18 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**lastScaledAt** | **java.time.Instant** | | [optional] -**message** | **String** | | [optional] -**phase** | **String** | | [optional] -**reason** | **String** | | [optional] -**replicas** | **Integer** | | [optional] -**selector** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS.md deleted file mode 100644 index f83726c443f3..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS.md +++ /dev/null @@ -1,15 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**caCertSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**certSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**keySecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink.md deleted file mode 100644 index 612356730dc0..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink.md 
+++ /dev/null @@ -1,13 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**abstractVolumeSource** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource.md) | | [optional] - - - diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource.md b/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource.md deleted file mode 100644 index 523f502f91b1..000000000000 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource.md +++ /dev/null @@ -1,16 +0,0 @@ - - -# GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**abstractVolumeSource** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource.md) | | [optional] -**concurrency** | **Integer** | | [optional] -**pollPeriod** | [**Duration**](Duration.md) | | [optional] -**readOnly** | **Boolean** | | [optional] - - - diff --git a/sdks/java/client/docs/InfoServiceApi.md b/sdks/java/client/docs/InfoServiceApi.md index c13a5556dc39..79d993cf66ad 100644 --- a/sdks/java/client/docs/InfoServiceApi.md +++ b/sdks/java/client/docs/InfoServiceApi.md @@ -4,11 +4,80 @@ All URIs are relative to *http://localhost:2746* Method | HTTP request | Description ------------- | ------------- | ------------- +[**infoServiceCollectEvent**](InfoServiceApi.md#infoServiceCollectEvent) | **POST** /api/v1/tracking/event | [**infoServiceGetInfo**](InfoServiceApi.md#infoServiceGetInfo) | **GET** /api/v1/info | [**infoServiceGetUserInfo**](InfoServiceApi.md#infoServiceGetUserInfo) | **GET** /api/v1/userinfo | 
[**infoServiceGetVersion**](InfoServiceApi.md#infoServiceGetVersion) | **GET** /api/v1/version | + +# **infoServiceCollectEvent** +> Object infoServiceCollectEvent(body) + + + +### Example +```java +// Import classes: +import io.argoproj.workflow.ApiClient; +import io.argoproj.workflow.ApiException; +import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; +import io.argoproj.workflow.models.*; +import io.argoproj.workflow.apis.InfoServiceApi; + +public class Example { + public static void main(String[] args) { + ApiClient defaultClient = Configuration.getDefaultApiClient(); + defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); + + InfoServiceApi apiInstance = new InfoServiceApi(defaultClient); + IoArgoprojWorkflowV1alpha1CollectEventRequest body = new IoArgoprojWorkflowV1alpha1CollectEventRequest(); // IoArgoprojWorkflowV1alpha1CollectEventRequest | + try { + Object result = apiInstance.infoServiceCollectEvent(body); + System.out.println(result); + } catch (ApiException e) { + System.err.println("Exception when calling InfoServiceApi#infoServiceCollectEvent"); + System.err.println("Status code: " + e.getCode()); + System.err.println("Reason: " + e.getResponseBody()); + System.err.println("Response headers: " + e.getResponseHeaders()); + e.printStackTrace(); + } + } +} +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **body** | [**IoArgoprojWorkflowV1alpha1CollectEventRequest**](IoArgoprojWorkflowV1alpha1CollectEventRequest.md)| | + +### Return type + +**Object** + +### Authorization + +[BearerToken](../README.md#BearerToken) + +### HTTP request 
headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | An unexpected error response. | - | + # **infoServiceGetInfo** > IoArgoprojWorkflowV1alpha1InfoResponse infoServiceGetInfo() @@ -21,6 +90,7 @@ Method | HTTP request | Description import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.InfoServiceApi; @@ -28,6 +98,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); InfoServiceApi apiInstance = new InfoServiceApi(defaultClient); try { @@ -53,7 +129,7 @@ This endpoint does not need any parameter. 
### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -78,6 +154,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.InfoServiceApi; @@ -85,6 +162,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); InfoServiceApi apiInstance = new InfoServiceApi(defaultClient); try { @@ -110,7 +193,7 @@ This endpoint does not need any parameter. ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -135,6 +218,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.InfoServiceApi; @@ -142,6 +226,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); InfoServiceApi apiInstance = new InfoServiceApi(defaultClient); try { @@ -167,7 +257,7 @@ This endpoint does not need any parameter. ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtGCStatus.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtGCStatus.md new file mode 100644 index 000000000000..6ffab8f512f7 --- /dev/null +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtGCStatus.md @@ -0,0 +1,16 @@ + + +# IoArgoprojWorkflowV1alpha1ArtGCStatus + +ArtGCStatus maintains state related to ArtifactGC + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**notSpecified** | **Boolean** | if this is true, we already checked to see if we need to do it and we don't | [optional] +**podsRecouped** | **Map<String, Boolean>** | have completed Pods been processed? (mapped by Pod name) used to prevent re-processing the Status of a Pod more than once | [optional] +**strategiesProcessed** | **Map<String, Boolean>** | have Pods been started to perform this strategy? 
(enables us not to re-process what we've already done) | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1Artifact.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1Artifact.md index 6f8b90ca08b4..185ba5cb34d2 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1Artifact.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1Artifact.md @@ -10,7 +10,10 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **archive** | [**IoArgoprojWorkflowV1alpha1ArchiveStrategy**](IoArgoprojWorkflowV1alpha1ArchiveStrategy.md) | | [optional] **archiveLogs** | **Boolean** | ArchiveLogs indicates if the container logs should be archived | [optional] +**artifactGC** | [**IoArgoprojWorkflowV1alpha1ArtifactGC**](IoArgoprojWorkflowV1alpha1ArtifactGC.md) | | [optional] **artifactory** | [**IoArgoprojWorkflowV1alpha1ArtifactoryArtifact**](IoArgoprojWorkflowV1alpha1ArtifactoryArtifact.md) | | [optional] +**azure** | [**IoArgoprojWorkflowV1alpha1AzureArtifact**](IoArgoprojWorkflowV1alpha1AzureArtifact.md) | | [optional] +**deleted** | **Boolean** | Has this been deleted? 
| [optional] **from** | **String** | From allows an artifact to reference an artifact from a previous step | [optional] **fromExpression** | **String** | FromExpression, if defined, is evaluated to specify the value for the artifact | [optional] **gcs** | [**IoArgoprojWorkflowV1alpha1GCSArtifact**](IoArgoprojWorkflowV1alpha1GCSArtifact.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGC.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGC.md new file mode 100644 index 000000000000..642e6004e6a1 --- /dev/null +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGC.md @@ -0,0 +1,16 @@ + + +# IoArgoprojWorkflowV1alpha1ArtifactGC + +ArtifactGC describes how to delete artifacts from completed Workflows + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**podMetadata** | [**IoArgoprojWorkflowV1alpha1Metadata**](IoArgoprojWorkflowV1alpha1Metadata.md) | | [optional] +**serviceAccountName** | **String** | ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion | [optional] +**strategy** | **String** | Strategy is the strategy to use. 
| [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCSpec.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCSpec.md new file mode 100644 index 000000000000..02b9c0c456a9 --- /dev/null +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCSpec.md @@ -0,0 +1,14 @@ + + +# IoArgoprojWorkflowV1alpha1ArtifactGCSpec + +ArtifactGCSpec specifies the Artifacts that need to be deleted + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**artifactsByNode** | [**Map<String, IoArgoprojWorkflowV1alpha1ArtifactNodeSpec>**](IoArgoprojWorkflowV1alpha1ArtifactNodeSpec.md) | ArtifactsByNode maps Node name to information pertaining to Artifacts on that Node | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCStatus.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCStatus.md new file mode 100644 index 000000000000..63f11f2494d8 --- /dev/null +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCStatus.md @@ -0,0 +1,14 @@ + + +# IoArgoprojWorkflowV1alpha1ArtifactGCStatus + +ArtifactGCStatus describes the result of the deletion + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**artifactResultsByNode** | [**Map<String, IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus>**](IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus.md) | ArtifactResultsByNode maps Node name to result | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactLocation.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactLocation.md index fe585e8ec573..dd41fb581833 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactLocation.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactLocation.md @@ -10,6 +10,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- 
**archiveLogs** | **Boolean** | ArchiveLogs indicates if the container logs should be archived | [optional] **artifactory** | [**IoArgoprojWorkflowV1alpha1ArtifactoryArtifact**](IoArgoprojWorkflowV1alpha1ArtifactoryArtifact.md) | | [optional] +**azure** | [**IoArgoprojWorkflowV1alpha1AzureArtifact**](IoArgoprojWorkflowV1alpha1AzureArtifact.md) | | [optional] **gcs** | [**IoArgoprojWorkflowV1alpha1GCSArtifact**](IoArgoprojWorkflowV1alpha1GCSArtifact.md) | | [optional] **git** | [**IoArgoprojWorkflowV1alpha1GitArtifact**](IoArgoprojWorkflowV1alpha1GitArtifact.md) | | [optional] **hdfs** | [**IoArgoprojWorkflowV1alpha1HDFSArtifact**](IoArgoprojWorkflowV1alpha1HDFSArtifact.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactNodeSpec.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactNodeSpec.md new file mode 100644 index 000000000000..5c34893e4c8f --- /dev/null +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactNodeSpec.md @@ -0,0 +1,15 @@ + + +# IoArgoprojWorkflowV1alpha1ArtifactNodeSpec + +ArtifactNodeSpec specifies the Artifacts that need to be deleted for a given Node + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**archiveLocation** | [**IoArgoprojWorkflowV1alpha1ArtifactLocation**](IoArgoprojWorkflowV1alpha1ArtifactLocation.md) | | [optional] +**artifacts** | [**Map<String, IoArgoprojWorkflowV1alpha1Artifact>**](IoArgoprojWorkflowV1alpha1Artifact.md) | Artifacts maps artifact name to Artifact description | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactPaths.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactPaths.md index cb5f7ff5dac9..1d879bf2a15a 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactPaths.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactPaths.md @@ -10,7 +10,10 @@ Name | Type | Description | Notes ------------ | ------------- | 
------------- | ------------- **archive** | [**IoArgoprojWorkflowV1alpha1ArchiveStrategy**](IoArgoprojWorkflowV1alpha1ArchiveStrategy.md) | | [optional] **archiveLogs** | **Boolean** | ArchiveLogs indicates if the container logs should be archived | [optional] +**artifactGC** | [**IoArgoprojWorkflowV1alpha1ArtifactGC**](IoArgoprojWorkflowV1alpha1ArtifactGC.md) | | [optional] **artifactory** | [**IoArgoprojWorkflowV1alpha1ArtifactoryArtifact**](IoArgoprojWorkflowV1alpha1ArtifactoryArtifact.md) | | [optional] +**azure** | [**IoArgoprojWorkflowV1alpha1AzureArtifact**](IoArgoprojWorkflowV1alpha1AzureArtifact.md) | | [optional] +**deleted** | **Boolean** | Has this been deleted? | [optional] **from** | **String** | From allows an artifact to reference an artifact from a previous step | [optional] **fromExpression** | **String** | FromExpression, if defined, is evaluated to specify the value for the artifact | [optional] **gcs** | [**IoArgoprojWorkflowV1alpha1GCSArtifact**](IoArgoprojWorkflowV1alpha1GCSArtifact.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepository.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepository.md index 84aa3c62e53d..d3d53c665644 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepository.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepository.md @@ -10,6 +10,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **archiveLogs** | **Boolean** | ArchiveLogs enables log archiving | [optional] **artifactory** | [**IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository**](IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository.md) | | [optional] +**azure** | [**IoArgoprojWorkflowV1alpha1AzureArtifactRepository**](IoArgoprojWorkflowV1alpha1AzureArtifactRepository.md) | | [optional] **gcs** | [**IoArgoprojWorkflowV1alpha1GCSArtifactRepository**](IoArgoprojWorkflowV1alpha1GCSArtifactRepository.md) | | 
[optional] **hdfs** | [**IoArgoprojWorkflowV1alpha1HDFSArtifactRepository**](IoArgoprojWorkflowV1alpha1HDFSArtifactRepository.md) | | [optional] **oss** | [**IoArgoprojWorkflowV1alpha1OSSArtifactRepository**](IoArgoprojWorkflowV1alpha1OSSArtifactRepository.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResult.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResult.md new file mode 100644 index 000000000000..761300b2d9d0 --- /dev/null +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResult.md @@ -0,0 +1,16 @@ + + +# IoArgoprojWorkflowV1alpha1ArtifactResult + +ArtifactResult describes the result of attempting to delete a given Artifact + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**error** | **String** | Error is an optional error message which should be set if Success==false | [optional] +**name** | **String** | Name is the name of the Artifact | +**success** | **Boolean** | Success describes whether the deletion succeeded | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus.md new file mode 100644 index 000000000000..4f1607a798b8 --- /dev/null +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus.md @@ -0,0 +1,14 @@ + + +# IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus + +ArtifactResultNodeStatus describes the result of the deletion on a given node + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**artifactResults** | [**Map<String, IoArgoprojWorkflowV1alpha1ArtifactResult>**](IoArgoprojWorkflowV1alpha1ArtifactResult.md) | ArtifactResults maps Artifact name to result of the deletion | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifact.md 
b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifact.md new file mode 100644 index 000000000000..f97e4d975779 --- /dev/null +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifact.md @@ -0,0 +1,18 @@ + + +# IoArgoprojWorkflowV1alpha1AzureArtifact + +AzureArtifact is the location of an Azure Storage artifact + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**accountKeySecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] +**blob** | **String** | Blob is the blob name (i.e., path) in the container where the artifact resides | +**container** | **String** | Container is the container where resources will be stored | +**endpoint** | **String** | Endpoint is the service url associated with an account. It is most likely \"https://<ACCOUNT_NAME>.blob.core.windows.net\" | +**useSDKCreds** | **Boolean** | UseSDKCreds tells the driver to figure out credentials based on sdk defaults. | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifactRepository.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifactRepository.md new file mode 100644 index 000000000000..087f86c147f3 --- /dev/null +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifactRepository.md @@ -0,0 +1,18 @@ + + +# IoArgoprojWorkflowV1alpha1AzureArtifactRepository + +AzureArtifactRepository defines the controller configuration for an Azure Blob Storage artifact repository + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**accountKeySecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] +**blobNameFormat** | **String** | BlobNameFormat defines the format of how to store blob names. 
Can reference workflow variables | [optional] +**container** | **String** | Container is the container where resources will be stored | +**endpoint** | **String** | Endpoint is the service url associated with an account. It is most likely \"https://<ACCOUNT_NAME>.blob.core.windows.net\" | +**useSDKCreds** | **Boolean** | UseSDKCreds tells the driver to figure out credentials based on sdk defaults. | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1BasicAuth.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1BasicAuth.md new file mode 100644 index 000000000000..ea9385b595ad --- /dev/null +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1BasicAuth.md @@ -0,0 +1,15 @@ + + +# IoArgoprojWorkflowV1alpha1BasicAuth + +BasicAuth describes the secret selectors required for basic authentication + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**passwordSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] +**usernameSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ClientCertAuth.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ClientCertAuth.md new file mode 100644 index 000000000000..4e14e9c86a9b --- /dev/null +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ClientCertAuth.md @@ -0,0 +1,15 @@ + + +# IoArgoprojWorkflowV1alpha1ClientCertAuth + +ClientCertAuth holds necessary information for client authentication via certificates + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**clientCertSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] 
+**clientKeySecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] + + + diff --git a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1CollectEventRequest.md similarity index 61% rename from sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage.md rename to sdks/java/client/docs/IoArgoprojWorkflowV1alpha1CollectEventRequest.md index 8758f0331e53..cefa97c2b896 100644 --- a/sdks/java/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1CollectEventRequest.md @@ -1,6 +1,6 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage +# IoArgoprojWorkflowV1alpha1CollectEventRequest ## Properties @@ -8,7 +8,6 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **String** | | [optional] -**subPath** | **String** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ContainerNode.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ContainerNode.md index acc64cdcd173..9294f86ba64a 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ContainerNode.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ContainerNode.md @@ -7,12 +7,12 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**args** | **List<String>** | Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". 
Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] -**command** | **List<String>** | Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**args** | **List<String>** | Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**command** | **List<String>** | Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. 
Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] **dependencies** | **List<String>** | | [optional] **env** | [**List<io.kubernetes.client.openapi.models.V1EnvVar>**](io.kubernetes.client.openapi.models.V1EnvVar.md) | List of environment variables to set in the container. Cannot be updated. | [optional] **envFrom** | [**List<io.kubernetes.client.openapi.models.V1EnvFromSource>**](io.kubernetes.client.openapi.models.V1EnvFromSource.md) | List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. | [optional] -**image** | **String** | Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. | [optional] +**image** | **String** | Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. | [optional] **imagePullPolicy** | **String** | Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. 
More info: https://kubernetes.io/docs/concepts/containers/images#updating-images | [optional] **lifecycle** | [**io.kubernetes.client.openapi.models.V1Lifecycle**](io.kubernetes.client.openapi.models.V1Lifecycle.md) | | [optional] **livenessProbe** | [**io.kubernetes.client.openapi.models.V1Probe**](io.kubernetes.client.openapi.models.V1Probe.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1GetUserInfoResponse.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1GetUserInfoResponse.md index d34ec4268dd6..b4853e379197 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1GetUserInfoResponse.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1GetUserInfoResponse.md @@ -12,6 +12,7 @@ Name | Type | Description | Notes **groups** | **List<String>** | | [optional] **issuer** | **String** | | [optional] **serviceAccountName** | **String** | | [optional] +**serviceAccountNamespace** | **String** | | [optional] **subject** | **String** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1GitArtifact.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1GitArtifact.md index 4d248368e2d3..54589bada4e7 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1GitArtifact.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1GitArtifact.md @@ -8,6 +8,7 @@ GitArtifact is the location of an git artifact Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- +**branch** | **String** | Branch is the branch to fetch when `SingleBranch` is enabled | [optional] **depth** | **Integer** | Depth specifies clones/fetches should be shallow and include the given number of commits from the branch tip | [optional] **disableSubmodules** | **Boolean** | DisableSubmodules disables submodules during git clone | [optional] **fetch** | **List<String>** | Fetch specifies a number of refs that should be fetched before checkout | [optional] @@ -15,6 +16,7 @@ Name | Type | Description | Notes 
**passwordSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **repo** | **String** | Repo is the git repository | **revision** | **String** | Revision is the git commit, tag, branch to checkout | [optional] +**singleBranch** | **Boolean** | SingleBranch enables single branch clone, using the `branch` parameter | [optional] **sshPrivateKeySecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **usernameSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1HTTP.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1HTTP.md index 8021d210a411..0c0761308588 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1HTTP.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1HTTP.md @@ -8,8 +8,9 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **body** | **String** | Body is content of the HTTP Request | [optional] +**bodyFrom** | [**IoArgoprojWorkflowV1alpha1HTTPBodySource**](IoArgoprojWorkflowV1alpha1HTTPBodySource.md) | | [optional] **headers** | [**List<IoArgoprojWorkflowV1alpha1HTTPHeader>**](IoArgoprojWorkflowV1alpha1HTTPHeader.md) | Headers are an optional list of headers to send with HTTP requests | [optional] -**insecureSkipVerify** | **Boolean** | insecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client | [optional] +**insecureSkipVerify** | **Boolean** | InsecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client | [optional] **method** | **String** | Method is HTTP methods for HTTP Request | [optional] **successCondition** | **String** | SuccessCondition is an expression if evaluated to true is 
considered successful | [optional] **timeoutSeconds** | **Integer** | TimeoutSeconds is request timeout for HTTP Request. Default is 30 seconds | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1HTTPArtifact.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1HTTPArtifact.md index 069527ee4e5d..97899afebf4b 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1HTTPArtifact.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1HTTPArtifact.md @@ -2,12 +2,13 @@ # IoArgoprojWorkflowV1alpha1HTTPArtifact -HTTPArtifact allows an file served on HTTP to be placed as an input artifact in a container +HTTPArtifact allows a file served on HTTP to be placed as an input artifact in a container ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- +**auth** | [**IoArgoprojWorkflowV1alpha1HTTPAuth**](IoArgoprojWorkflowV1alpha1HTTPAuth.md) | | [optional] **headers** | [**List<IoArgoprojWorkflowV1alpha1Header>**](IoArgoprojWorkflowV1alpha1Header.md) | Headers are an optional list of headers to send with HTTP requests for artifacts | [optional] **url** | **String** | URL of the artifact | diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1HTTPAuth.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1HTTPAuth.md new file mode 100644 index 000000000000..0225f1505fc5 --- /dev/null +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1HTTPAuth.md @@ -0,0 +1,15 @@ + + +# IoArgoprojWorkflowV1alpha1HTTPAuth + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**basicAuth** | [**IoArgoprojWorkflowV1alpha1BasicAuth**](IoArgoprojWorkflowV1alpha1BasicAuth.md) | | [optional] +**clientCert** | [**IoArgoprojWorkflowV1alpha1ClientCertAuth**](IoArgoprojWorkflowV1alpha1ClientCertAuth.md) | | [optional] +**oauth2** | [**IoArgoprojWorkflowV1alpha1OAuth2Auth**](IoArgoprojWorkflowV1alpha1OAuth2Auth.md) | | [optional] + + + diff --git 
a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1HTTPBodySource.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1HTTPBodySource.md new file mode 100644 index 000000000000..34c531b3de55 --- /dev/null +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1HTTPBodySource.md @@ -0,0 +1,14 @@ + + +# IoArgoprojWorkflowV1alpha1HTTPBodySource + +HTTPBodySource contains the source of the HTTP body. + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**bytes** | **byte[]** | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1LifecycleHook.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1LifecycleHook.md index c4b0ff16703f..f5ac1bf2a5a4 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1LifecycleHook.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1LifecycleHook.md @@ -9,7 +9,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **arguments** | [**IoArgoprojWorkflowV1alpha1Arguments**](IoArgoprojWorkflowV1alpha1Arguments.md) | | [optional] **expression** | **String** | Expression is a condition expression for when a node will be retried. 
If it evaluates to false, the node will not be retried and the retry strategy will be ignored | [optional] -**template** | **String** | Template is the name of the template to execute by the hook | +**template** | **String** | Template is the name of the template to execute by the hook | [optional] **templateRef** | [**IoArgoprojWorkflowV1alpha1TemplateRef**](IoArgoprojWorkflowV1alpha1TemplateRef.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ManifestFrom.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ManifestFrom.md new file mode 100644 index 000000000000..19d552b5a8bd --- /dev/null +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ManifestFrom.md @@ -0,0 +1,13 @@ + + +# IoArgoprojWorkflowV1alpha1ManifestFrom + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**artifact** | [**IoArgoprojWorkflowV1alpha1Artifact**](IoArgoprojWorkflowV1alpha1Artifact.md) | | + + + diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1OAuth2Auth.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1OAuth2Auth.md new file mode 100644 index 000000000000..d1698c499ebb --- /dev/null +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1OAuth2Auth.md @@ -0,0 +1,18 @@ + + +# IoArgoprojWorkflowV1alpha1OAuth2Auth + +OAuth2Auth holds all information for client authentication via OAuth2 tokens + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**clientIDSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] +**clientSecretSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] +**endpointParams** | [**List<IoArgoprojWorkflowV1alpha1OAuth2EndpointParam>**](IoArgoprojWorkflowV1alpha1OAuth2EndpointParam.md) | | [optional] +**scopes** | 
**List<String>** | | [optional] +**tokenURLSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1OAuth2EndpointParam.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1OAuth2EndpointParam.md new file mode 100644 index 000000000000..4d68de359483 --- /dev/null +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1OAuth2EndpointParam.md @@ -0,0 +1,15 @@ + + +# IoArgoprojWorkflowV1alpha1OAuth2EndpointParam + +EndpointParam is for requesting optional fields that should be sent in the oauth request + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**key** | **String** | Name is the header name | +**value** | **String** | Value is the literal value to use for the header | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ResourceTemplate.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ResourceTemplate.md index 6ff21e43a1d9..cb77a453db7b 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ResourceTemplate.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ResourceTemplate.md @@ -12,6 +12,7 @@ Name | Type | Description | Notes **failureCondition** | **String** | FailureCondition is a label selector expression which describes the conditions of the k8s resource in which the step was considered failed | [optional] **flags** | **List<String>** | Flags is a set of additional options passed to kubectl before submitting a resource I.e. 
to disable resource validation: flags: [ \"--validate=false\" # disable resource validation ] | [optional] **manifest** | **String** | Manifest contains the kubernetes manifest | [optional] +**manifestFrom** | [**IoArgoprojWorkflowV1alpha1ManifestFrom**](IoArgoprojWorkflowV1alpha1ManifestFrom.md) | | [optional] **mergeStrategy** | **String** | MergeStrategy is the strategy used to merge a patch. It defaults to \"strategic\" Must be one of: strategic, merge, json | [optional] **setOwnerReference** | **Boolean** | SetOwnerReference sets the reference to the workflow on the OwnerReference of generated resource. | [optional] **successCondition** | **String** | SuccessCondition is a label selector expression which describes the conditions of the k8s resource in which it is acceptable to proceed to the following step | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest.md index ff61a9013697..afbb8da0a055 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest.md @@ -10,6 +10,7 @@ Name | Type | Description | Notes **memoized** | **Boolean** | | [optional] **name** | **String** | | [optional] **namespace** | **String** | | [optional] +**parameters** | **List<String>** | | [optional] **uid** | **String** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest.md index 35be45e7a99d..d81e31d0ad0f 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest.md @@ -10,6 +10,7 @@ Name | Type | Description | Notes **name** | **String** | | [optional] **namespace** | **String** | 
| [optional] **nodeFieldSelector** | **String** | | [optional] +**parameters** | **List<String>** | | [optional] **restartSuccessful** | **Boolean** | | [optional] **uid** | **String** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ScriptTemplate.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ScriptTemplate.md index ecbf5cbe1d6f..f12b68c0ce1e 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ScriptTemplate.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ScriptTemplate.md @@ -8,11 +8,11 @@ ScriptTemplate is a template subtype to enable scripting through code steps Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**args** | **List<String>** | Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] -**command** | **List<String>** | Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**args** | **List<String>** | Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**command** | **List<String>** | Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] **env** | [**List<io.kubernetes.client.openapi.models.V1EnvVar>**](io.kubernetes.client.openapi.models.V1EnvVar.md) | List of environment variables to set in the container. Cannot be updated. | [optional] **envFrom** | [**List<io.kubernetes.client.openapi.models.V1EnvFromSource>**](io.kubernetes.client.openapi.models.V1EnvFromSource.md) | List of sources to populate environment variables in the container. 
The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. | [optional] -**image** | **String** | Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. | +**image** | **String** | Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. | **imagePullPolicy** | **String** | Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images | [optional] **lifecycle** | [**io.kubernetes.client.openapi.models.V1Lifecycle**](io.kubernetes.client.openapi.models.V1Lifecycle.md) | | [optional] **livenessProbe** | [**io.kubernetes.client.openapi.models.V1Probe**](io.kubernetes.client.openapi.models.V1Probe.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1UserContainer.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1UserContainer.md index ac9f14ea1028..af7dda53b6a0 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1UserContainer.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1UserContainer.md @@ -8,11 +8,11 @@ UserContainer is a container specified by a user. Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**args** | **List<String>** | Arguments to the entrypoint. 
The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] -**command** | **List<String>** | Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**args** | **List<String>** | Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**command** | **List<String>** | Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] **env** | [**List<io.kubernetes.client.openapi.models.V1EnvVar>**](io.kubernetes.client.openapi.models.V1EnvVar.md) | List of environment variables to set in the container. Cannot be updated. | [optional] **envFrom** | [**List<io.kubernetes.client.openapi.models.V1EnvFromSource>**](io.kubernetes.client.openapi.models.V1EnvFromSource.md) | List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. | [optional] -**image** | **String** | Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. | [optional] +**image** | **String** | Container image name. 
More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. | [optional] **imagePullPolicy** | **String** | Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images | [optional] **lifecycle** | [**io.kubernetes.client.openapi.models.V1Lifecycle**](io.kubernetes.client.openapi.models.V1Lifecycle.md) | | [optional] **livenessProbe** | [**io.kubernetes.client.openapi.models.V1Probe**](io.kubernetes.client.openapi.models.V1Probe.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest.md index a6cff0a8f3c0..0002a985052a 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest.md @@ -10,6 +10,7 @@ Name | Type | Description | Notes **memoized** | **Boolean** | | [optional] **name** | **String** | | [optional] **namespace** | **String** | | [optional] +**parameters** | **List<String>** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowRetryRequest.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowRetryRequest.md index a0b2f0c30e72..031587cb5cb8 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowRetryRequest.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowRetryRequest.md @@ -10,6 +10,7 @@ Name | Type | Description | Notes **name** | **String** | | [optional] **namespace** | **String** | | [optional] **nodeFieldSelector** | **String** | | [optional] +**parameters** | **List<String>** | | [optional] **restartSuccessful** | **Boolean** 
| | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSpec.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSpec.md index 3b25cc5667a7..474e58847459 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSpec.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSpec.md @@ -12,6 +12,7 @@ Name | Type | Description | Notes **affinity** | [**io.kubernetes.client.openapi.models.V1Affinity**](io.kubernetes.client.openapi.models.V1Affinity.md) | | [optional] **archiveLogs** | **Boolean** | ArchiveLogs indicates if the container logs should be archived | [optional] **arguments** | [**IoArgoprojWorkflowV1alpha1Arguments**](IoArgoprojWorkflowV1alpha1Arguments.md) | | [optional] +**artifactGC** | [**IoArgoprojWorkflowV1alpha1ArtifactGC**](IoArgoprojWorkflowV1alpha1ArtifactGC.md) | | [optional] **artifactRepositoryRef** | [**IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef**](IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef.md) | | [optional] **automountServiceAccountToken** | **Boolean** | AutomountServiceAccountToken indicates whether a service account token should be automatically mounted in pods. ServiceAccountName of ExecutorConfig must be specified if this value is false. | [optional] **dnsConfig** | [**io.kubernetes.client.openapi.models.V1PodDNSConfig**](io.kubernetes.client.openapi.models.V1PodDNSConfig.md) | | [optional] @@ -29,7 +30,7 @@ Name | Type | Description | Notes **podDisruptionBudget** | [**IoK8sApiPolicyV1beta1PodDisruptionBudgetSpec**](IoK8sApiPolicyV1beta1PodDisruptionBudgetSpec.md) | | [optional] **podGC** | [**IoArgoprojWorkflowV1alpha1PodGC**](IoArgoprojWorkflowV1alpha1PodGC.md) | | [optional] **podMetadata** | [**IoArgoprojWorkflowV1alpha1Metadata**](IoArgoprojWorkflowV1alpha1Metadata.md) | | [optional] -**podPriority** | **Integer** | Priority to apply to workflow pods. | [optional] +**podPriority** | **Integer** | Priority to apply to workflow pods. 
DEPRECATED: Use PodPriorityClassName instead. | [optional] **podPriorityClassName** | **String** | PriorityClassName to apply to workflow pods. | [optional] **podSpecPatch** | **String** | PodSpecPatch holds strategic merge patch to apply against the pod spec. Allows parameterization of container fields which are not strings (e.g. resource limits). | [optional] **priority** | **Integer** | Priority is used if controller is configured to process limited number of workflows in parallel. Workflows with higher priority are processed first. | [optional] diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md index 795b398bec51..3d614b0af7bc 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md @@ -8,6 +8,7 @@ WorkflowStatus contains overall status information about a workflow Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- +**artifactGCStatus** | [**IoArgoprojWorkflowV1alpha1ArtGCStatus**](IoArgoprojWorkflowV1alpha1ArtGCStatus.md) | | [optional] **artifactRepositoryRef** | [**IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus**](IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus.md) | | [optional] **compressedNodes** | **String** | Compressed and base64 decoded Nodes map | [optional] **conditions** | [**List<IoArgoprojWorkflowV1alpha1Condition>**](IoArgoprojWorkflowV1alpha1Condition.md) | Conditions is a list of conditions the Workflow may have | [optional] diff --git a/sdks/java/client/docs/PipelineLogEntry.md b/sdks/java/client/docs/PipelineLogEntry.md deleted file mode 100644 index cf7e117fbccb..000000000000 --- a/sdks/java/client/docs/PipelineLogEntry.md +++ /dev/null @@ -1,17 +0,0 @@ - - -# PipelineLogEntry - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- 
-**msg** | **String** | | [optional] -**namespace** | **String** | | [optional] -**pipelineName** | **String** | | [optional] -**stepName** | **String** | | [optional] -**time** | **java.time.Instant** | | [optional] - - - diff --git a/sdks/java/client/docs/PipelinePipelineWatchEvent.md b/sdks/java/client/docs/PipelinePipelineWatchEvent.md deleted file mode 100644 index a557135ee485..000000000000 --- a/sdks/java/client/docs/PipelinePipelineWatchEvent.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# PipelinePipelineWatchEvent - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**_object** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline.md) | | [optional] -**type** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/PipelineServiceApi.md b/sdks/java/client/docs/PipelineServiceApi.md deleted file mode 100644 index 09c66952b31a..000000000000 --- a/sdks/java/client/docs/PipelineServiceApi.md +++ /dev/null @@ -1,542 +0,0 @@ -# PipelineServiceApi - -All URIs are relative to *http://localhost:2746* - -Method | HTTP request | Description -------------- | ------------- | ------------- -[**pipelineServiceDeletePipeline**](PipelineServiceApi.md#pipelineServiceDeletePipeline) | **DELETE** /api/v1/pipelines/{namespace}/{name} | -[**pipelineServiceGetPipeline**](PipelineServiceApi.md#pipelineServiceGetPipeline) | **GET** /api/v1/pipelines/{namespace}/{name} | -[**pipelineServiceListPipelines**](PipelineServiceApi.md#pipelineServiceListPipelines) | **GET** /api/v1/pipelines/{namespace} | -[**pipelineServicePipelineLogs**](PipelineServiceApi.md#pipelineServicePipelineLogs) | **GET** /api/v1/stream/pipelines/{namespace}/logs | -[**pipelineServiceRestartPipeline**](PipelineServiceApi.md#pipelineServiceRestartPipeline) | **POST** /api/v1/pipelines/{namespace}/{name}/restart | 
-[**pipelineServiceWatchPipelines**](PipelineServiceApi.md#pipelineServiceWatchPipelines) | **GET** /api/v1/stream/pipelines/{namespace} | -[**pipelineServiceWatchSteps**](PipelineServiceApi.md#pipelineServiceWatchSteps) | **GET** /api/v1/stream/steps/{namespace} | - - - -# **pipelineServiceDeletePipeline** -> Object pipelineServiceDeletePipeline(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun) - - - -### Example -```java -// Import classes: -import io.argoproj.workflow.ApiClient; -import io.argoproj.workflow.ApiException; -import io.argoproj.workflow.Configuration; -import io.argoproj.workflow.models.*; -import io.argoproj.workflow.apis.PipelineServiceApi; - -public class Example { - public static void main(String[] args) { - ApiClient defaultClient = Configuration.getDefaultApiClient(); - defaultClient.setBasePath("http://localhost:2746"); - - PipelineServiceApi apiInstance = new PipelineServiceApi(defaultClient); - String namespace = "namespace_example"; // String | - String name = "name_example"; // String | - String deleteOptionsGracePeriodSeconds = "deleteOptionsGracePeriodSeconds_example"; // String | The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. - String deleteOptionsPreconditionsUid = "deleteOptionsPreconditionsUid_example"; // String | Specifies the target UID. +optional. - String deleteOptionsPreconditionsResourceVersion = "deleteOptionsPreconditionsResourceVersion_example"; // String | Specifies the target ResourceVersion +optional. 
- Boolean deleteOptionsOrphanDependents = true; // Boolean | Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. - String deleteOptionsPropagationPolicy = "deleteOptionsPropagationPolicy_example"; // String | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. - List deleteOptionsDryRun = Arrays.asList(); // List | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. 
- try { - Object result = apiInstance.pipelineServiceDeletePipeline(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun); - System.out.println(result); - } catch (ApiException e) { - System.err.println("Exception when calling PipelineServiceApi#pipelineServiceDeletePipeline"); - System.err.println("Status code: " + e.getCode()); - System.err.println("Reason: " + e.getResponseBody()); - System.err.println("Response headers: " + e.getResponseHeaders()); - e.printStackTrace(); - } - } -} -``` - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **namespace** | **String**| | - **name** | **String**| | - **deleteOptionsGracePeriodSeconds** | **String**| The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. | [optional] - **deleteOptionsPreconditionsUid** | **String**| Specifies the target UID. +optional. | [optional] - **deleteOptionsPreconditionsResourceVersion** | **String**| Specifies the target ResourceVersion +optional. | [optional] - **deleteOptionsOrphanDependents** | **Boolean**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] - **deleteOptionsPropagationPolicy** | **String**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. 
The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] - **deleteOptionsDryRun** | [**List<String>**](String.md)| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. | [optional] - -### Return type - -**Object** - -### Authorization - -No authorization required - -### HTTP request headers - - - **Content-Type**: Not defined - - **Accept**: application/json - -### HTTP response details -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**200** | A successful response. | - | -**0** | An unexpected error response. 
| - | - - -# **pipelineServiceGetPipeline** -> GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline pipelineServiceGetPipeline(namespace, name, getOptionsResourceVersion) - - - -### Example -```java -// Import classes: -import io.argoproj.workflow.ApiClient; -import io.argoproj.workflow.ApiException; -import io.argoproj.workflow.Configuration; -import io.argoproj.workflow.models.*; -import io.argoproj.workflow.apis.PipelineServiceApi; - -public class Example { - public static void main(String[] args) { - ApiClient defaultClient = Configuration.getDefaultApiClient(); - defaultClient.setBasePath("http://localhost:2746"); - - PipelineServiceApi apiInstance = new PipelineServiceApi(defaultClient); - String namespace = "namespace_example"; // String | - String name = "name_example"; // String | - String getOptionsResourceVersion = "getOptionsResourceVersion_example"; // String | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional - try { - GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline result = apiInstance.pipelineServiceGetPipeline(namespace, name, getOptionsResourceVersion); - System.out.println(result); - } catch (ApiException e) { - System.err.println("Exception when calling PipelineServiceApi#pipelineServiceGetPipeline"); - System.err.println("Status code: " + e.getCode()); - System.err.println("Reason: " + e.getResponseBody()); - System.err.println("Response headers: " + e.getResponseHeaders()); - e.printStackTrace(); - } - } -} -``` - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **namespace** | **String**| | - **name** | **String**| | - **getOptionsResourceVersion** | **String**| resourceVersion sets a constraint on what resource versions a request may be served from. 
See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - -### Return type - -[**GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline.md) - -### Authorization - -No authorization required - -### HTTP request headers - - - **Content-Type**: Not defined - - **Accept**: application/json - -### HTTP response details -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**200** | A successful response. | - | -**0** | An unexpected error response. | - | - - -# **pipelineServiceListPipelines** -> GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList pipelineServiceListPipelines(namespace, listOptionsLabelSelector, listOptionsFieldSelector, listOptionsWatch, listOptionsAllowWatchBookmarks, listOptionsResourceVersion, listOptionsResourceVersionMatch, listOptionsTimeoutSeconds, listOptionsLimit, listOptionsContinue) - - - -### Example -```java -// Import classes: -import io.argoproj.workflow.ApiClient; -import io.argoproj.workflow.ApiException; -import io.argoproj.workflow.Configuration; -import io.argoproj.workflow.models.*; -import io.argoproj.workflow.apis.PipelineServiceApi; - -public class Example { - public static void main(String[] args) { - ApiClient defaultClient = Configuration.getDefaultApiClient(); - defaultClient.setBasePath("http://localhost:2746"); - - PipelineServiceApi apiInstance = new PipelineServiceApi(defaultClient); - String namespace = "namespace_example"; // String | - String listOptionsLabelSelector = "listOptionsLabelSelector_example"; // String | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. - String listOptionsFieldSelector = "listOptionsFieldSelector_example"; // String | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. 
- Boolean listOptionsWatch = true; // Boolean | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. - Boolean listOptionsAllowWatchBookmarks = true; // Boolean | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. - String listOptionsResourceVersion = "listOptionsResourceVersion_example"; // String | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional - String listOptionsResourceVersionMatch = "listOptionsResourceVersionMatch_example"; // String | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional - String listOptionsTimeoutSeconds = "listOptionsTimeoutSeconds_example"; // String | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. - String listOptionsLimit = "listOptionsLimit_example"; // String | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. - String listOptionsContinue = "listOptionsContinue_example"; // String | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. - try { - GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList result = apiInstance.pipelineServiceListPipelines(namespace, listOptionsLabelSelector, listOptionsFieldSelector, listOptionsWatch, listOptionsAllowWatchBookmarks, listOptionsResourceVersion, listOptionsResourceVersionMatch, listOptionsTimeoutSeconds, listOptionsLimit, listOptionsContinue); - System.out.println(result); - } catch (ApiException e) { - System.err.println("Exception when calling PipelineServiceApi#pipelineServiceListPipelines"); - System.err.println("Status code: " + e.getCode()); - System.err.println("Reason: " + e.getResponseBody()); - System.err.println("Response headers: " + e.getResponseHeaders()); - e.printStackTrace(); - } - } -} -``` - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **namespace** | **String**| | - **listOptionsLabelSelector** | **String**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] - **listOptionsFieldSelector** | **String**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **listOptionsWatch** | **Boolean**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. 
| [optional] - **listOptionsAllowWatchBookmarks** | **Boolean**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] - **listOptionsResourceVersion** | **String**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **listOptionsResourceVersionMatch** | **String**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **listOptionsTimeoutSeconds** | **String**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **listOptionsLimit** | **String**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. 
If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **listOptionsContinue** | **String**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. 
Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] - -### Return type - -[**GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList**](GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList.md) - -### Authorization - -No authorization required - -### HTTP request headers - - - **Content-Type**: Not defined - - **Accept**: application/json - -### HTTP response details -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**200** | A successful response. | - | -**0** | An unexpected error response. | - | - - -# **pipelineServicePipelineLogs** -> StreamResultOfPipelineLogEntry pipelineServicePipelineLogs(namespace, name, stepName, grep, podLogOptionsContainer, podLogOptionsFollow, podLogOptionsPrevious, podLogOptionsSinceSeconds, podLogOptionsSinceTimeSeconds, podLogOptionsSinceTimeNanos, podLogOptionsTimestamps, podLogOptionsTailLines, podLogOptionsLimitBytes, podLogOptionsInsecureSkipTLSVerifyBackend) - - - -### Example -```java -// Import classes: -import io.argoproj.workflow.ApiClient; -import io.argoproj.workflow.ApiException; -import io.argoproj.workflow.Configuration; -import io.argoproj.workflow.models.*; -import io.argoproj.workflow.apis.PipelineServiceApi; - -public class Example { - public static void main(String[] args) { - ApiClient defaultClient = Configuration.getDefaultApiClient(); - defaultClient.setBasePath("http://localhost:2746"); - - PipelineServiceApi apiInstance = new PipelineServiceApi(defaultClient); - String namespace = "namespace_example"; // String | - String name = "name_example"; // String | optional - only return entries for this pipeline. - String stepName = "stepName_example"; // String | optional - only return entries for this step. - String grep = "grep_example"; // String | optional - only return entries which match this expresssion. 
- String podLogOptionsContainer = "podLogOptionsContainer_example"; // String | The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. - Boolean podLogOptionsFollow = true; // Boolean | Follow the log stream of the pod. Defaults to false. +optional. - Boolean podLogOptionsPrevious = true; // Boolean | Return previous terminated container logs. Defaults to false. +optional. - String podLogOptionsSinceSeconds = "podLogOptionsSinceSeconds_example"; // String | A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. - String podLogOptionsSinceTimeSeconds = "podLogOptionsSinceTimeSeconds_example"; // String | Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. - Integer podLogOptionsSinceTimeNanos = 56; // Integer | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. - Boolean podLogOptionsTimestamps = true; // Boolean | If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. - String podLogOptionsTailLines = "podLogOptionsTailLines_example"; // String | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. - String podLogOptionsLimitBytes = "podLogOptionsLimitBytes_example"; // String | If set, the number of bytes to read from the server before terminating the log output. 
This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. - Boolean podLogOptionsInsecureSkipTLSVerifyBackend = true; // Boolean | insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. - try { - StreamResultOfPipelineLogEntry result = apiInstance.pipelineServicePipelineLogs(namespace, name, stepName, grep, podLogOptionsContainer, podLogOptionsFollow, podLogOptionsPrevious, podLogOptionsSinceSeconds, podLogOptionsSinceTimeSeconds, podLogOptionsSinceTimeNanos, podLogOptionsTimestamps, podLogOptionsTailLines, podLogOptionsLimitBytes, podLogOptionsInsecureSkipTLSVerifyBackend); - System.out.println(result); - } catch (ApiException e) { - System.err.println("Exception when calling PipelineServiceApi#pipelineServicePipelineLogs"); - System.err.println("Status code: " + e.getCode()); - System.err.println("Reason: " + e.getResponseBody()); - System.err.println("Response headers: " + e.getResponseHeaders()); - e.printStackTrace(); - } - } -} -``` - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **namespace** | **String**| | - **name** | **String**| optional - only return entries for this pipeline. | [optional] - **stepName** | **String**| optional - only return entries for this step. | [optional] - **grep** | **String**| optional - only return entries which match this expresssion. 
| [optional] - **podLogOptionsContainer** | **String**| The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. | [optional] - **podLogOptionsFollow** | **Boolean**| Follow the log stream of the pod. Defaults to false. +optional. | [optional] - **podLogOptionsPrevious** | **Boolean**| Return previous terminated container logs. Defaults to false. +optional. | [optional] - **podLogOptionsSinceSeconds** | **String**| A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. | [optional] - **podLogOptionsSinceTimeSeconds** | **String**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. | [optional] - **podLogOptionsSinceTimeNanos** | **Integer**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. | [optional] - **podLogOptionsTimestamps** | **Boolean**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. | [optional] - **podLogOptionsTailLines** | **String**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. | [optional] - **podLogOptionsLimitBytes** | **String**| If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. 
+optional. | [optional] - **podLogOptionsInsecureSkipTLSVerifyBackend** | **Boolean**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. | [optional] - -### Return type - -[**StreamResultOfPipelineLogEntry**](StreamResultOfPipelineLogEntry.md) - -### Authorization - -No authorization required - -### HTTP request headers - - - **Content-Type**: Not defined - - **Accept**: application/json - -### HTTP response details -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**200** | A successful response.(streaming responses) | - | -**0** | An unexpected error response. 
| - | - - -# **pipelineServiceRestartPipeline** -> Object pipelineServiceRestartPipeline(namespace, name) - - - -### Example -```java -// Import classes: -import io.argoproj.workflow.ApiClient; -import io.argoproj.workflow.ApiException; -import io.argoproj.workflow.Configuration; -import io.argoproj.workflow.models.*; -import io.argoproj.workflow.apis.PipelineServiceApi; - -public class Example { - public static void main(String[] args) { - ApiClient defaultClient = Configuration.getDefaultApiClient(); - defaultClient.setBasePath("http://localhost:2746"); - - PipelineServiceApi apiInstance = new PipelineServiceApi(defaultClient); - String namespace = "namespace_example"; // String | - String name = "name_example"; // String | - try { - Object result = apiInstance.pipelineServiceRestartPipeline(namespace, name); - System.out.println(result); - } catch (ApiException e) { - System.err.println("Exception when calling PipelineServiceApi#pipelineServiceRestartPipeline"); - System.err.println("Status code: " + e.getCode()); - System.err.println("Reason: " + e.getResponseBody()); - System.err.println("Response headers: " + e.getResponseHeaders()); - e.printStackTrace(); - } - } -} -``` - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **namespace** | **String**| | - **name** | **String**| | - -### Return type - -**Object** - -### Authorization - -No authorization required - -### HTTP request headers - - - **Content-Type**: Not defined - - **Accept**: application/json - -### HTTP response details -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**200** | A successful response. | - | -**0** | An unexpected error response. 
| - | - - -# **pipelineServiceWatchPipelines** -> StreamResultOfPipelinePipelineWatchEvent pipelineServiceWatchPipelines(namespace, listOptionsLabelSelector, listOptionsFieldSelector, listOptionsWatch, listOptionsAllowWatchBookmarks, listOptionsResourceVersion, listOptionsResourceVersionMatch, listOptionsTimeoutSeconds, listOptionsLimit, listOptionsContinue) - - - -### Example -```java -// Import classes: -import io.argoproj.workflow.ApiClient; -import io.argoproj.workflow.ApiException; -import io.argoproj.workflow.Configuration; -import io.argoproj.workflow.models.*; -import io.argoproj.workflow.apis.PipelineServiceApi; - -public class Example { - public static void main(String[] args) { - ApiClient defaultClient = Configuration.getDefaultApiClient(); - defaultClient.setBasePath("http://localhost:2746"); - - PipelineServiceApi apiInstance = new PipelineServiceApi(defaultClient); - String namespace = "namespace_example"; // String | - String listOptionsLabelSelector = "listOptionsLabelSelector_example"; // String | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. - String listOptionsFieldSelector = "listOptionsFieldSelector_example"; // String | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. - Boolean listOptionsWatch = true; // Boolean | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. - Boolean listOptionsAllowWatchBookmarks = true; // Boolean | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. 
- String listOptionsResourceVersion = "listOptionsResourceVersion_example"; // String | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional - String listOptionsResourceVersionMatch = "listOptionsResourceVersionMatch_example"; // String | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional - String listOptionsTimeoutSeconds = "listOptionsTimeoutSeconds_example"; // String | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. - String listOptionsLimit = "listOptionsLimit_example"; // String | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. - String listOptionsContinue = "listOptionsContinue_example"; // String | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
- try { - StreamResultOfPipelinePipelineWatchEvent result = apiInstance.pipelineServiceWatchPipelines(namespace, listOptionsLabelSelector, listOptionsFieldSelector, listOptionsWatch, listOptionsAllowWatchBookmarks, listOptionsResourceVersion, listOptionsResourceVersionMatch, listOptionsTimeoutSeconds, listOptionsLimit, listOptionsContinue); - System.out.println(result); - } catch (ApiException e) { - System.err.println("Exception when calling PipelineServiceApi#pipelineServiceWatchPipelines"); - System.err.println("Status code: " + e.getCode()); - System.err.println("Reason: " + e.getResponseBody()); - System.err.println("Response headers: " + e.getResponseHeaders()); - e.printStackTrace(); - } - } -} -``` - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **namespace** | **String**| | - **listOptionsLabelSelector** | **String**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] - **listOptionsFieldSelector** | **String**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **listOptionsWatch** | **Boolean**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] - **listOptionsAllowWatchBookmarks** | **Boolean**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] - **listOptionsResourceVersion** | **String**| resourceVersion sets a constraint on what resource versions a request may be served from. 
See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **listOptionsResourceVersionMatch** | **String**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **listOptionsTimeoutSeconds** | **String**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **listOptionsLimit** | **String**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. 
If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **listOptionsContinue** | **String**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] - -### Return type - -[**StreamResultOfPipelinePipelineWatchEvent**](StreamResultOfPipelinePipelineWatchEvent.md) - -### Authorization - -No authorization required - -### HTTP request headers - - - **Content-Type**: Not defined - - **Accept**: application/json - -### HTTP response details -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**200** | A successful response.(streaming responses) | - | -**0** | An unexpected error response. 
| - | - - -# **pipelineServiceWatchSteps** -> StreamResultOfPipelineStepWatchEvent pipelineServiceWatchSteps(namespace, listOptionsLabelSelector, listOptionsFieldSelector, listOptionsWatch, listOptionsAllowWatchBookmarks, listOptionsResourceVersion, listOptionsResourceVersionMatch, listOptionsTimeoutSeconds, listOptionsLimit, listOptionsContinue) - - - -### Example -```java -// Import classes: -import io.argoproj.workflow.ApiClient; -import io.argoproj.workflow.ApiException; -import io.argoproj.workflow.Configuration; -import io.argoproj.workflow.models.*; -import io.argoproj.workflow.apis.PipelineServiceApi; - -public class Example { - public static void main(String[] args) { - ApiClient defaultClient = Configuration.getDefaultApiClient(); - defaultClient.setBasePath("http://localhost:2746"); - - PipelineServiceApi apiInstance = new PipelineServiceApi(defaultClient); - String namespace = "namespace_example"; // String | - String listOptionsLabelSelector = "listOptionsLabelSelector_example"; // String | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. - String listOptionsFieldSelector = "listOptionsFieldSelector_example"; // String | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. - Boolean listOptionsWatch = true; // Boolean | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. - Boolean listOptionsAllowWatchBookmarks = true; // Boolean | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. 
- String listOptionsResourceVersion = "listOptionsResourceVersion_example"; // String | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional - String listOptionsResourceVersionMatch = "listOptionsResourceVersionMatch_example"; // String | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional - String listOptionsTimeoutSeconds = "listOptionsTimeoutSeconds_example"; // String | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. - String listOptionsLimit = "listOptionsLimit_example"; // String | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. - String listOptionsContinue = "listOptionsContinue_example"; // String | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
- try { - StreamResultOfPipelineStepWatchEvent result = apiInstance.pipelineServiceWatchSteps(namespace, listOptionsLabelSelector, listOptionsFieldSelector, listOptionsWatch, listOptionsAllowWatchBookmarks, listOptionsResourceVersion, listOptionsResourceVersionMatch, listOptionsTimeoutSeconds, listOptionsLimit, listOptionsContinue); - System.out.println(result); - } catch (ApiException e) { - System.err.println("Exception when calling PipelineServiceApi#pipelineServiceWatchSteps"); - System.err.println("Status code: " + e.getCode()); - System.err.println("Reason: " + e.getResponseBody()); - System.err.println("Response headers: " + e.getResponseHeaders()); - e.printStackTrace(); - } - } -} -``` - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **namespace** | **String**| | - **listOptionsLabelSelector** | **String**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] - **listOptionsFieldSelector** | **String**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **listOptionsWatch** | **Boolean**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] - **listOptionsAllowWatchBookmarks** | **Boolean**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] - **listOptionsResourceVersion** | **String**| resourceVersion sets a constraint on what resource versions a request may be served from. 
See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **listOptionsResourceVersionMatch** | **String**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **listOptionsTimeoutSeconds** | **String**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **listOptionsLimit** | **String**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. 
If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **listOptionsContinue** | **String**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] - -### Return type - -[**StreamResultOfPipelineStepWatchEvent**](StreamResultOfPipelineStepWatchEvent.md) - -### Authorization - -No authorization required - -### HTTP request headers - - - **Content-Type**: Not defined - - **Accept**: application/json - -### HTTP response details -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**200** | A successful response.(streaming responses) | - | -**0** | An unexpected error response. 
| - | - diff --git a/sdks/java/client/docs/PipelineStepWatchEvent.md b/sdks/java/client/docs/PipelineStepWatchEvent.md deleted file mode 100644 index 112a9e89297d..000000000000 --- a/sdks/java/client/docs/PipelineStepWatchEvent.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# PipelineStepWatchEvent - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**_object** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Step**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Step.md) | | [optional] -**type** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/SensorServiceApi.md b/sdks/java/client/docs/SensorServiceApi.md index f4b16f3ad28e..6f39d645fee4 100644 --- a/sdks/java/client/docs/SensorServiceApi.md +++ b/sdks/java/client/docs/SensorServiceApi.md @@ -25,6 +25,7 @@ Method | HTTP request | Description import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.SensorServiceApi; @@ -32,6 +33,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); SensorServiceApi apiInstance = new SensorServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -63,7 +70,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -88,6 +95,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.SensorServiceApi; @@ -95,6 +103,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); SensorServiceApi apiInstance = new SensorServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -138,7 +152,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -163,6 +177,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.SensorServiceApi; @@ -170,6 +185,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); SensorServiceApi apiInstance = new SensorServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -203,7 +224,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -228,6 +249,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.SensorServiceApi; @@ -235,6 +257,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); SensorServiceApi apiInstance = new SensorServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -282,7 +310,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -307,6 +335,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.SensorServiceApi; @@ -314,6 +343,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); SensorServiceApi apiInstance = new SensorServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -369,7 +404,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -394,6 +429,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.SensorServiceApi; @@ -401,6 +437,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); SensorServiceApi apiInstance = new SensorServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -434,7 +476,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -459,6 +501,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.SensorServiceApi; @@ -466,6 +509,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); SensorServiceApi apiInstance = new SensorServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -513,7 +562,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/java/client/docs/StreamResultOfPipelineLogEntry.md b/sdks/java/client/docs/StreamResultOfPipelineLogEntry.md deleted file mode 100644 index 1db6ddab38b3..000000000000 --- a/sdks/java/client/docs/StreamResultOfPipelineLogEntry.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# StreamResultOfPipelineLogEntry - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**error** | [**GrpcGatewayRuntimeStreamError**](GrpcGatewayRuntimeStreamError.md) | | [optional] -**result** | [**PipelineLogEntry**](PipelineLogEntry.md) | | [optional] - - - diff --git a/sdks/java/client/docs/StreamResultOfPipelinePipelineWatchEvent.md b/sdks/java/client/docs/StreamResultOfPipelinePipelineWatchEvent.md deleted file mode 100644 index d97b04e5e0ba..000000000000 --- a/sdks/java/client/docs/StreamResultOfPipelinePipelineWatchEvent.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# StreamResultOfPipelinePipelineWatchEvent - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**error** | [**GrpcGatewayRuntimeStreamError**](GrpcGatewayRuntimeStreamError.md) | | [optional] -**result** | [**PipelinePipelineWatchEvent**](PipelinePipelineWatchEvent.md) | | [optional] - - - diff --git a/sdks/java/client/docs/StreamResultOfPipelineStepWatchEvent.md b/sdks/java/client/docs/StreamResultOfPipelineStepWatchEvent.md deleted file mode 100644 index f462487dc294..000000000000 --- a/sdks/java/client/docs/StreamResultOfPipelineStepWatchEvent.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# StreamResultOfPipelineStepWatchEvent - - -## Properties - 
-Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**error** | [**GrpcGatewayRuntimeStreamError**](GrpcGatewayRuntimeStreamError.md) | | [optional] -**result** | [**PipelineStepWatchEvent**](PipelineStepWatchEvent.md) | | [optional] - - - diff --git a/sdks/java/client/docs/WorkflowServiceApi.md b/sdks/java/client/docs/WorkflowServiceApi.md index eb6646fec470..009ccf2d78a8 100644 --- a/sdks/java/client/docs/WorkflowServiceApi.md +++ b/sdks/java/client/docs/WorkflowServiceApi.md @@ -35,6 +35,7 @@ Method | HTTP request | Description import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowServiceApi; @@ -42,6 +43,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -73,7 +80,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -88,7 +95,7 @@ No authorization required # **workflowServiceDeleteWorkflow** -> Object workflowServiceDeleteWorkflow(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun) +> Object workflowServiceDeleteWorkflow(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun, force) @@ -98,6 +105,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowServiceApi; @@ -105,6 +113,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -115,8 +129,9 @@ public class Example { Boolean deleteOptionsOrphanDependents = true; // Boolean | Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. String deleteOptionsPropagationPolicy = "deleteOptionsPropagationPolicy_example"; // String | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. List deleteOptionsDryRun = Arrays.asList(); // List | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. 
+ Boolean force = true; // Boolean | try { - Object result = apiInstance.workflowServiceDeleteWorkflow(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun); + Object result = apiInstance.workflowServiceDeleteWorkflow(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun, force); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling WorkflowServiceApi#workflowServiceDeleteWorkflow"); @@ -141,6 +156,7 @@ Name | Type | Description | Notes **deleteOptionsOrphanDependents** | **Boolean**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] **deleteOptionsPropagationPolicy** | **String**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] **deleteOptionsDryRun** | [**List<String>**](String.md)| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed +optional. | [optional] + **force** | **Boolean**| | [optional] ### Return type @@ -148,7 +164,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -173,6 +189,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowServiceApi; @@ -180,6 +197,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -215,7 +238,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -240,6 +263,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowServiceApi; @@ -247,6 +271,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -278,7 +308,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -303,6 +333,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowServiceApi; @@ -310,6 +341,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -359,7 +396,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -384,6 +421,7 @@ DEPRECATED: Cannot work via HTTP if podName is an empty string. 
Use WorkflowLogs import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowServiceApi; @@ -391,6 +429,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -448,7 +492,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -473,6 +517,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowServiceApi; @@ -480,6 +525,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -513,7 +564,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -538,6 +589,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowServiceApi; @@ -545,6 +597,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -578,7 +636,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -603,6 +661,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowServiceApi; @@ -610,6 +669,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -643,7 +708,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -668,6 +733,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowServiceApi; @@ -675,6 +741,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -708,7 +780,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -733,6 +805,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowServiceApi; @@ -740,6 +813,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -773,7 +852,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -798,6 +877,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowServiceApi; @@ -805,6 +885,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -836,7 +922,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -861,6 +947,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowServiceApi; @@ -868,6 +955,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -901,7 +994,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -926,6 +1019,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowServiceApi; @@ -933,6 +1027,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -966,7 +1066,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -991,6 +1091,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowServiceApi; @@ -998,6 +1099,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -1045,7 +1152,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -1070,6 +1177,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowServiceApi; @@ -1077,6 +1185,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -1126,7 +1240,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -1151,6 +1265,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowServiceApi; @@ -1158,6 +1273,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -1215,7 +1336,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/java/client/docs/WorkflowTemplateServiceApi.md b/sdks/java/client/docs/WorkflowTemplateServiceApi.md index cafbaed7c703..e7e12328d57b 100644 --- a/sdks/java/client/docs/WorkflowTemplateServiceApi.md +++ b/sdks/java/client/docs/WorkflowTemplateServiceApi.md @@ -24,6 +24,7 @@ Method | HTTP request | Description import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowTemplateServiceApi; @@ -31,6 +32,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowTemplateServiceApi apiInstance = new WorkflowTemplateServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -62,7 +69,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -87,6 +94,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowTemplateServiceApi; @@ -94,6 +102,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowTemplateServiceApi apiInstance = new WorkflowTemplateServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -137,7 +151,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -162,6 +176,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowTemplateServiceApi; @@ -169,6 +184,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowTemplateServiceApi apiInstance = new WorkflowTemplateServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -202,7 +223,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -227,6 +248,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowTemplateServiceApi; @@ -234,6 +256,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowTemplateServiceApi apiInstance = new WorkflowTemplateServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -265,7 +293,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -290,6 +318,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowTemplateServiceApi; @@ -297,6 +326,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowTemplateServiceApi apiInstance = new WorkflowTemplateServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -344,7 +379,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -369,6 +404,7 @@ No authorization required import io.argoproj.workflow.ApiClient; import io.argoproj.workflow.ApiException; import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.auth.*; import io.argoproj.workflow.models.*; import io.argoproj.workflow.apis.WorkflowTemplateServiceApi; @@ -376,6 +412,12 @@ public class Example { public static void main(String[] args) { ApiClient defaultClient = Configuration.getDefaultApiClient(); defaultClient.setBasePath("http://localhost:2746"); + + // Configure API key authorization: BearerToken + ApiKeyAuth BearerToken = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + BearerToken.setApiKey("YOUR API KEY"); + // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) + //BearerToken.setApiKeyPrefix("Token"); WorkflowTemplateServiceApi apiInstance = new WorkflowTemplateServiceApi(defaultClient); String namespace = "namespace_example"; // String | @@ -409,7 +451,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/java/pom.xml b/sdks/java/pom.xml deleted file mode 100644 index bca2e4d5eec8..000000000000 --- a/sdks/java/pom.xml +++ /dev/null @@ -1,123 +0,0 @@ - - 4.0.0 - io.argoproj.workflow - argo-client-java - argo-client-java - 0.0.0-VERSION - - - - org.apache.maven.plugins - maven-javadoc-plugin - 3.1.1 - - - attach-javadocs - - jar - - - - - none - - - http.response.details - a - Http Response Details: - - - - - - org.apache.maven.plugins - maven-source-plugin - 2.2.1 - - - attach-sources - - jar-no-fork - - - - - - - - - - sign-artifacts - - - - org.apache.maven.plugins - maven-gpg-plugin - 1.5 - - - sign-artifacts - verify - - sign - - - - - - - - - - - - io.kubernetes - client-java - 9.0.2 - - - io.swagger - swagger-annotations - ${swagger-core-version} - - - com.squareup.okhttp3 - okhttp - ${okhttp-version} - - - com.squareup.okhttp3 - logging-interceptor - ${okhttp-version} - - - com.google.code.gson - gson - ${gson-version} - - - javax.annotation - javax.annotation-api - ${javax-annotation-version} - - - - junit - junit - ${junit-version} - test - - - - 1.8 - ${java.version} - ${java.version} - 1.6.2 - 4.9.1 - 2.8.6 - 1.3.2 - 4.13.1 - UTF-8 - - diff --git a/sdks/java/tests/.gitignore b/sdks/java/tests/.gitignore new file mode 100644 index 000000000000..1de565933b05 --- /dev/null +++ b/sdks/java/tests/.gitignore @@ -0,0 +1 @@ +target \ No newline at end of file diff --git a/sdks/java/tests/pom.xml b/sdks/java/tests/pom.xml new file mode 100644 index 000000000000..1de9fcdcc987 --- /dev/null +++ b/sdks/java/tests/pom.xml @@ -0,0 +1,28 @@ + + 4.0.0 + io.argoproj.workflow + 
argo-client-java-tests + 0.0.0-VERSION + + + + io.argoproj.workflow + argo-client-java + 0.0.0-SNAPSHOT + + + + junit + junit + 4.13.1 + test + + + + 1.8 + ${java.version} + ${java.version} + UTF-8 + + diff --git a/sdks/java/tests/src/test/java/tests/ClientTest.java b/sdks/java/tests/src/test/java/tests/ClientTest.java new file mode 100644 index 000000000000..a365a5be7564 --- /dev/null +++ b/sdks/java/tests/src/test/java/tests/ClientTest.java @@ -0,0 +1,59 @@ +package tests; + + +import io.argoproj.workflow.ApiClient; +import io.argoproj.workflow.Configuration; +import io.argoproj.workflow.JSON; +import io.argoproj.workflow.apis.WorkflowServiceApi; +import io.argoproj.workflow.auth.ApiKeyAuth; +import io.argoproj.workflow.models.IoArgoprojWorkflowV1alpha1Template; +import io.argoproj.workflow.models.IoArgoprojWorkflowV1alpha1Workflow; +import io.argoproj.workflow.models.IoArgoprojWorkflowV1alpha1WorkflowCreateRequest; +import io.argoproj.workflow.models.IoArgoprojWorkflowV1alpha1WorkflowSpec; +import io.kubernetes.client.openapi.models.V1Container; +import io.kubernetes.client.openapi.models.V1ObjectMeta; +import org.junit.Test; + +import java.util.Collections; + +public class ClientTest { + + private final ApiClient defaultClient = Configuration.getDefaultApiClient(); + + public static final String argoToken = System.getenv().get("ARGO_TOKEN"); + + { + ApiKeyAuth bearerAuth = (ApiKeyAuth) defaultClient.getAuthentication("BearerToken"); + bearerAuth.setApiKey(argoToken); + } + + private final WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient); + private final JSON json = new JSON(); + + @Test + public void testClient() throws Exception { + // create a workflow + IoArgoprojWorkflowV1alpha1WorkflowCreateRequest req = new IoArgoprojWorkflowV1alpha1WorkflowCreateRequest(); + req.setWorkflow( + new IoArgoprojWorkflowV1alpha1Workflow() + .metadata(new V1ObjectMeta().generateName("test-")) + .spec( + new IoArgoprojWorkflowV1alpha1WorkflowSpec() + 
.entrypoint("main") + .templates( + Collections.singletonList( + new IoArgoprojWorkflowV1alpha1Template() + .name("main") + .container( + new V1Container() + .image("argoproj/argosay:v2") + ) + ) + ) + ) + ); + apiInstance.workflowServiceCreateWorkflow("argo", + req); + + } +} \ No newline at end of file diff --git a/sdks/python/Makefile b/sdks/python/Makefile index dd1bdd6ac0a2..aa647b6f8089 100644 --- a/sdks/python/Makefile +++ b/sdks/python/Makefile @@ -41,3 +41,10 @@ generate: --generate-alias-as-model # https://vsupalov.com/docker-shared-permissions/#set-the-docker-user-when-running-your-container $(CHOWN) $(WD) || sudo $(CHOWN) $(WD) + +install: + pip3 install ./client + +test: + ../../hack/access-token.sh init + env ARGO_TOKEN="`../../hack/access-token.sh get`" python3 tests/*.py diff --git a/sdks/python/README.md b/sdks/python/README.md index 1635b809330a..29e29e7af634 100644 --- a/sdks/python/README.md +++ b/sdks/python/README.md @@ -33,8 +33,9 @@ import yaml import argo_workflows from argo_workflows.api import workflow_service_api -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_create_request import \ - IoArgoprojWorkflowV1alpha1WorkflowCreateRequest +from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_create_request import ( + IoArgoprojWorkflowV1alpha1WorkflowCreateRequest, +) configuration = argo_workflows.Configuration(host="https://127.0.0.1:2746") configuration.verify_ssl = False @@ -65,10 +66,12 @@ from argo_workflows.api import workflow_service_api from argo_workflows.model.container import Container from argo_workflows.model.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_create_request import - IoArgoprojWorkflowV1alpha1WorkflowCreateRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_spec 
import - IoArgoprojWorkflowV1alpha1WorkflowSpec +from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_create_request import ( + IoArgoprojWorkflowV1alpha1WorkflowCreateRequest, +) +from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_spec import ( + IoArgoprojWorkflowV1alpha1WorkflowSpec, +) from argo_workflows.model.object_meta import ObjectMeta configuration = argo_workflows.Configuration(host="https://127.0.0.1:2746") @@ -98,7 +101,7 @@ if __name__ == '__main__': ## Examples -You can find additional examples [here](examples). +You can find additional examples [here](tests). ## API Reference diff --git a/sdks/python/client/argo_workflows/api/archived_workflow_service_api.py b/sdks/python/client/argo_workflows/api/archived_workflow_service_api.py index 268aa08a71eb..327e15dc3715 100644 --- a/sdks/python/client/argo_workflows/api/archived_workflow_service_api.py +++ b/sdks/python/client/argo_workflows/api/archived_workflow_service_api.py @@ -110,7 +110,9 @@ def __delete_archived_workflow( self.delete_archived_workflow = _Endpoint( settings={ 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/archived-workflows/{uid}', 'operation_id': 'delete_archived_workflow', 'http_method': 'DELETE', @@ -226,7 +228,9 @@ def __get_archived_workflow( self.get_archived_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/archived-workflows/{uid}', 'operation_id': 'get_archived_workflow', 'http_method': 'GET', @@ -337,7 +341,9 @@ def __list_archived_workflow_label_keys( self.list_archived_workflow_label_keys = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1LabelKeys,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/archived-workflows-label-keys', 'operation_id': 'list_archived_workflow_label_keys', 
'http_method': 'GET', @@ -450,7 +456,9 @@ def __list_archived_workflow_label_values( self.list_archived_workflow_label_values = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1LabelValues,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/archived-workflows-label-values', 'operation_id': 'list_archived_workflow_label_values', 'http_method': 'GET', @@ -609,7 +617,9 @@ def __list_archived_workflows( self.list_archived_workflows = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1WorkflowList,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/archived-workflows', 'operation_id': 'list_archived_workflows', 'http_method': 'GET', @@ -772,7 +782,9 @@ def __resubmit_archived_workflow( self.resubmit_archived_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/archived-workflows/{uid}/resubmit', 'operation_id': 'resubmit_archived_workflow', 'http_method': 'PUT', @@ -899,7 +911,9 @@ def __retry_archived_workflow( self.retry_archived_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/archived-workflows/{uid}/retry', 'operation_id': 'retry_archived_workflow', 'http_method': 'PUT', diff --git a/sdks/python/client/argo_workflows/api/artifact_service_api.py b/sdks/python/client/argo_workflows/api/artifact_service_api.py index 03f520abe6fd..1567a97205b3 100644 --- a/sdks/python/client/argo_workflows/api/artifact_service_api.py +++ b/sdks/python/client/argo_workflows/api/artifact_service_api.py @@ -36,11 +36,190 @@ def __init__(self, api_client=None): api_client = ApiClient() self.api_client = api_client + def __get_artifact_file( + self, + namespace, + id_discriminator, + id, + node_id, + artifact_name, + artifact_discriminator="outputs", + **kwargs + ): + """Get an artifact. 
# noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_artifact_file(namespace, id_discriminator, id, node_id, artifact_name, artifact_discriminator="outputs", async_req=True) + >>> result = thread.get() + + Args: + namespace (str): + id_discriminator (str): + id (str): + node_id (str): + artifact_name (str): + artifact_discriminator (str): defaults to "outputs", must be one of ["outputs"] + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + _host_index (int/None): specifies the index of the server + that we want to use. + Default is read from the configuration. + async_req (bool): execute request asynchronously + + Returns: + file_type + If the method is called asynchronously, returns the request + thread. 
+ """ + kwargs['async_req'] = kwargs.get( + 'async_req', False + ) + kwargs['_return_http_data_only'] = kwargs.get( + '_return_http_data_only', True + ) + kwargs['_preload_content'] = kwargs.get( + '_preload_content', True + ) + kwargs['_request_timeout'] = kwargs.get( + '_request_timeout', None + ) + kwargs['_check_input_type'] = kwargs.get( + '_check_input_type', True + ) + kwargs['_check_return_type'] = kwargs.get( + '_check_return_type', True + ) + kwargs['_host_index'] = kwargs.get('_host_index') + kwargs['namespace'] = \ + namespace + kwargs['id_discriminator'] = \ + id_discriminator + kwargs['id'] = \ + id + kwargs['node_id'] = \ + node_id + kwargs['artifact_name'] = \ + artifact_name + kwargs['artifact_discriminator'] = \ + artifact_discriminator + return self.call_with_http_info(**kwargs) + + self.get_artifact_file = _Endpoint( + settings={ + 'response_type': (file_type,), + 'auth': [ + 'BearerToken' + ], + 'endpoint_path': '/artifact-files/{namespace}/{idDiscriminator}/{id}/{nodeId}/{artifactDiscriminator}/{artifactName}', + 'operation_id': 'get_artifact_file', + 'http_method': 'GET', + 'servers': None, + }, + params_map={ + 'all': [ + 'namespace', + 'id_discriminator', + 'id', + 'node_id', + 'artifact_name', + 'artifact_discriminator', + ], + 'required': [ + 'namespace', + 'id_discriminator', + 'id', + 'node_id', + 'artifact_name', + 'artifact_discriminator', + ], + 'nullable': [ + ], + 'enum': [ + 'id_discriminator', + 'artifact_discriminator', + ], + 'validation': [ + ] + }, + root_map={ + 'validations': { + }, + 'allowed_values': { + ('id_discriminator',): { + + "WORKFLOW": "workflow", + "ARCHIVED-WORKFLOWS_": "archived-workflows " + }, + ('artifact_discriminator',): { + + "OUTPUTS": "outputs" + }, + }, + 'openapi_types': { + 'namespace': + (str,), + 'id_discriminator': + (str,), + 'id': + (str,), + 'node_id': + (str,), + 'artifact_name': + (str,), + 'artifact_discriminator': + (str,), + }, + 'attribute_map': { + 'namespace': 'namespace', + 
'id_discriminator': 'idDiscriminator', + 'id': 'id', + 'node_id': 'nodeId', + 'artifact_name': 'artifactName', + 'artifact_discriminator': 'artifactDiscriminator', + }, + 'location_map': { + 'namespace': 'path', + 'id_discriminator': 'path', + 'id': 'path', + 'node_id': 'path', + 'artifact_name': 'path', + 'artifact_discriminator': 'path', + }, + 'collection_format_map': { + } + }, + headers_map={ + 'accept': [ + 'application/json' + ], + 'content_type': [], + }, + api_client=api_client, + callable=__get_artifact_file + ) + def __get_input_artifact( self, namespace, name, - pod_name, + node_id, artifact_name, **kwargs ): @@ -49,13 +228,13 @@ def __get_input_artifact( This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_input_artifact(namespace, name, pod_name, artifact_name, async_req=True) + >>> thread = api.get_input_artifact(namespace, name, node_id, artifact_name, async_req=True) >>> result = thread.get() Args: namespace (str): name (str): - pod_name (str): + node_id (str): artifact_name (str): Keyword Args: @@ -80,7 +259,7 @@ def __get_input_artifact( async_req (bool): execute request asynchronously Returns: - None + file_type If the method is called asynchronously, returns the request thread. 
""" @@ -107,17 +286,19 @@ def __get_input_artifact( namespace kwargs['name'] = \ name - kwargs['pod_name'] = \ - pod_name + kwargs['node_id'] = \ + node_id kwargs['artifact_name'] = \ artifact_name return self.call_with_http_info(**kwargs) self.get_input_artifact = _Endpoint( settings={ - 'response_type': None, - 'auth': [], - 'endpoint_path': '/input-artifacts/{namespace}/{name}/{podName}/{artifactName}', + 'response_type': (file_type,), + 'auth': [ + 'BearerToken' + ], + 'endpoint_path': '/input-artifacts/{namespace}/{name}/{nodeId}/{artifactName}', 'operation_id': 'get_input_artifact', 'http_method': 'GET', 'servers': None, @@ -126,13 +307,13 @@ def __get_input_artifact( 'all': [ 'namespace', 'name', - 'pod_name', + 'node_id', 'artifact_name', ], 'required': [ 'namespace', 'name', - 'pod_name', + 'node_id', 'artifact_name', ], 'nullable': [ @@ -152,7 +333,7 @@ def __get_input_artifact( (str,), 'name': (str,), - 'pod_name': + 'node_id': (str,), 'artifact_name': (str,), @@ -160,13 +341,13 @@ def __get_input_artifact( 'attribute_map': { 'namespace': 'namespace', 'name': 'name', - 'pod_name': 'podName', + 'node_id': 'nodeId', 'artifact_name': 'artifactName', }, 'location_map': { 'namespace': 'path', 'name': 'path', - 'pod_name': 'path', + 'node_id': 'path', 'artifact_name': 'path', }, 'collection_format_map': { @@ -184,9 +365,8 @@ def __get_input_artifact( def __get_input_artifact_by_uid( self, - namespace, uid, - pod_name, + node_id, artifact_name, **kwargs ): @@ -195,13 +375,12 @@ def __get_input_artifact_by_uid( This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_input_artifact_by_uid(namespace, uid, pod_name, artifact_name, async_req=True) + >>> thread = api.get_input_artifact_by_uid(uid, node_id, artifact_name, async_req=True) >>> result = thread.get() Args: - namespace (str): uid (str): - pod_name (str): + node_id (str): artifact_name (str): Keyword Args: @@ -226,7 +405,7 @@ def __get_input_artifact_by_uid( async_req (bool): execute request asynchronously Returns: - None + file_type If the method is called asynchronously, returns the request thread. """ @@ -249,36 +428,34 @@ def __get_input_artifact_by_uid( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace kwargs['uid'] = \ uid - kwargs['pod_name'] = \ - pod_name + kwargs['node_id'] = \ + node_id kwargs['artifact_name'] = \ artifact_name return self.call_with_http_info(**kwargs) self.get_input_artifact_by_uid = _Endpoint( settings={ - 'response_type': None, - 'auth': [], - 'endpoint_path': '/input-artifacts-by-uid/{uid}/{podName}/{artifactName}', + 'response_type': (file_type,), + 'auth': [ + 'BearerToken' + ], + 'endpoint_path': '/input-artifacts-by-uid/{uid}/{nodeId}/{artifactName}', 'operation_id': 'get_input_artifact_by_uid', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ - 'namespace', 'uid', - 'pod_name', + 'node_id', 'artifact_name', ], 'required': [ - 'namespace', 'uid', - 'pod_name', + 'node_id', 'artifact_name', ], 'nullable': [ @@ -294,25 +471,21 @@ def __get_input_artifact_by_uid( 'allowed_values': { }, 'openapi_types': { - 'namespace': - (str,), 'uid': (str,), - 'pod_name': + 'node_id': (str,), 'artifact_name': (str,), }, 'attribute_map': { - 'namespace': 'namespace', 'uid': 'uid', - 'pod_name': 'podName', + 'node_id': 'nodeId', 'artifact_name': 'artifactName', }, 'location_map': { - 'namespace': 'path', 'uid': 'path', - 'pod_name': 'path', + 'node_id': 'path', 'artifact_name': 'path', }, 
'collection_format_map': { @@ -332,7 +505,7 @@ def __get_output_artifact( self, namespace, name, - pod_name, + node_id, artifact_name, **kwargs ): @@ -341,13 +514,13 @@ def __get_output_artifact( This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_output_artifact(namespace, name, pod_name, artifact_name, async_req=True) + >>> thread = api.get_output_artifact(namespace, name, node_id, artifact_name, async_req=True) >>> result = thread.get() Args: namespace (str): name (str): - pod_name (str): + node_id (str): artifact_name (str): Keyword Args: @@ -372,7 +545,7 @@ def __get_output_artifact( async_req (bool): execute request asynchronously Returns: - None + file_type If the method is called asynchronously, returns the request thread. """ @@ -399,17 +572,19 @@ def __get_output_artifact( namespace kwargs['name'] = \ name - kwargs['pod_name'] = \ - pod_name + kwargs['node_id'] = \ + node_id kwargs['artifact_name'] = \ artifact_name return self.call_with_http_info(**kwargs) self.get_output_artifact = _Endpoint( settings={ - 'response_type': None, - 'auth': [], - 'endpoint_path': '/artifacts/{namespace}/{name}/{podName}/{artifactName}', + 'response_type': (file_type,), + 'auth': [ + 'BearerToken' + ], + 'endpoint_path': '/artifacts/{namespace}/{name}/{nodeId}/{artifactName}', 'operation_id': 'get_output_artifact', 'http_method': 'GET', 'servers': None, @@ -418,13 +593,13 @@ def __get_output_artifact( 'all': [ 'namespace', 'name', - 'pod_name', + 'node_id', 'artifact_name', ], 'required': [ 'namespace', 'name', - 'pod_name', + 'node_id', 'artifact_name', ], 'nullable': [ @@ -444,7 +619,7 @@ def __get_output_artifact( (str,), 'name': (str,), - 'pod_name': + 'node_id': (str,), 'artifact_name': (str,), @@ -452,13 +627,13 @@ def __get_output_artifact( 'attribute_map': { 'namespace': 'namespace', 'name': 'name', - 'pod_name': 'podName', + 'node_id': 'nodeId', 'artifact_name': 
'artifactName', }, 'location_map': { 'namespace': 'path', 'name': 'path', - 'pod_name': 'path', + 'node_id': 'path', 'artifact_name': 'path', }, 'collection_format_map': { @@ -477,7 +652,7 @@ def __get_output_artifact( def __get_output_artifact_by_uid( self, uid, - pod_name, + node_id, artifact_name, **kwargs ): @@ -486,12 +661,12 @@ def __get_output_artifact_by_uid( This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_output_artifact_by_uid(uid, pod_name, artifact_name, async_req=True) + >>> thread = api.get_output_artifact_by_uid(uid, node_id, artifact_name, async_req=True) >>> result = thread.get() Args: uid (str): - pod_name (str): + node_id (str): artifact_name (str): Keyword Args: @@ -516,7 +691,7 @@ def __get_output_artifact_by_uid( async_req (bool): execute request asynchronously Returns: - None + file_type If the method is called asynchronously, returns the request thread. """ @@ -541,17 +716,19 @@ def __get_output_artifact_by_uid( kwargs['_host_index'] = kwargs.get('_host_index') kwargs['uid'] = \ uid - kwargs['pod_name'] = \ - pod_name + kwargs['node_id'] = \ + node_id kwargs['artifact_name'] = \ artifact_name return self.call_with_http_info(**kwargs) self.get_output_artifact_by_uid = _Endpoint( settings={ - 'response_type': None, - 'auth': [], - 'endpoint_path': '/artifacts-by-uid/{uid}/{podName}/{artifactName}', + 'response_type': (file_type,), + 'auth': [ + 'BearerToken' + ], + 'endpoint_path': '/artifacts-by-uid/{uid}/{nodeId}/{artifactName}', 'operation_id': 'get_output_artifact_by_uid', 'http_method': 'GET', 'servers': None, @@ -559,12 +736,12 @@ def __get_output_artifact_by_uid( params_map={ 'all': [ 'uid', - 'pod_name', + 'node_id', 'artifact_name', ], 'required': [ 'uid', - 'pod_name', + 'node_id', 'artifact_name', ], 'nullable': [ @@ -582,19 +759,19 @@ def __get_output_artifact_by_uid( 'openapi_types': { 'uid': (str,), - 'pod_name': + 'node_id': (str,), 
'artifact_name': (str,), }, 'attribute_map': { 'uid': 'uid', - 'pod_name': 'podName', + 'node_id': 'nodeId', 'artifact_name': 'artifactName', }, 'location_map': { 'uid': 'path', - 'pod_name': 'path', + 'node_id': 'path', 'artifact_name': 'path', }, 'collection_format_map': { diff --git a/sdks/python/client/argo_workflows/api/cluster_workflow_template_service_api.py b/sdks/python/client/argo_workflows/api/cluster_workflow_template_service_api.py index ce85e55656d4..0a39586497c8 100644 --- a/sdks/python/client/argo_workflows/api/cluster_workflow_template_service_api.py +++ b/sdks/python/client/argo_workflows/api/cluster_workflow_template_service_api.py @@ -109,7 +109,9 @@ def __create_cluster_workflow_template( self.create_cluster_workflow_template = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/cluster-workflow-templates', 'operation_id': 'create_cluster_workflow_template', 'http_method': 'POST', @@ -232,7 +234,9 @@ def __delete_cluster_workflow_template( self.delete_cluster_workflow_template = _Endpoint( settings={ 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/cluster-workflow-templates/{name}', 'operation_id': 'delete_cluster_workflow_template', 'http_method': 'DELETE', @@ -380,7 +384,9 @@ def __get_cluster_workflow_template( self.get_cluster_workflow_template = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/cluster-workflow-templates/{name}', 'operation_id': 'get_cluster_workflow_template', 'http_method': 'GET', @@ -501,7 +507,9 @@ def __lint_cluster_workflow_template( self.lint_cluster_workflow_template = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate,), - 'auth': [], + 'auth': 
[ + 'BearerToken' + ], 'endpoint_path': '/api/v1/cluster-workflow-templates/lint', 'operation_id': 'lint_cluster_workflow_template', 'http_method': 'POST', @@ -622,7 +630,9 @@ def __list_cluster_workflow_templates( self.list_cluster_workflow_templates = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/cluster-workflow-templates', 'operation_id': 'list_cluster_workflow_templates', 'http_method': 'GET', @@ -780,7 +790,9 @@ def __update_cluster_workflow_template( self.update_cluster_workflow_template = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/cluster-workflow-templates/{name}', 'operation_id': 'update_cluster_workflow_template', 'http_method': 'PUT', diff --git a/sdks/python/client/argo_workflows/api/cron_workflow_service_api.py b/sdks/python/client/argo_workflows/api/cron_workflow_service_api.py index 1edf97a9edaa..c4a89ad955b9 100644 --- a/sdks/python/client/argo_workflows/api/cron_workflow_service_api.py +++ b/sdks/python/client/argo_workflows/api/cron_workflow_service_api.py @@ -115,7 +115,9 @@ def __create_cron_workflow( self.create_cron_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1CronWorkflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/cron-workflows/{namespace}', 'operation_id': 'create_cron_workflow', 'http_method': 'POST', @@ -248,7 +250,9 @@ def __delete_cron_workflow( self.delete_cron_workflow = _Endpoint( settings={ 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/cron-workflows/{namespace}/{name}', 'operation_id': 'delete_cron_workflow', 'http_method': 'DELETE', @@ -406,7 +410,9 @@ def __get_cron_workflow( self.get_cron_workflow = _Endpoint( 
settings={ 'response_type': (IoArgoprojWorkflowV1alpha1CronWorkflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/cron-workflows/{namespace}/{name}', 'operation_id': 'get_cron_workflow', 'http_method': 'GET', @@ -537,7 +543,9 @@ def __lint_cron_workflow( self.lint_cron_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1CronWorkflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/cron-workflows/{namespace}/lint', 'operation_id': 'lint_cron_workflow', 'http_method': 'POST', @@ -669,7 +677,9 @@ def __list_cron_workflows( self.list_cron_workflows = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1CronWorkflowList,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/cron-workflows/{namespace}', 'operation_id': 'list_cron_workflows', 'http_method': 'GET', @@ -838,7 +848,9 @@ def __resume_cron_workflow( self.resume_cron_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1CronWorkflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/cron-workflows/{namespace}/{name}/resume', 'operation_id': 'resume_cron_workflow', 'http_method': 'PUT', @@ -975,7 +987,9 @@ def __suspend_cron_workflow( self.suspend_cron_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1CronWorkflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/cron-workflows/{namespace}/{name}/suspend', 'operation_id': 'suspend_cron_workflow', 'http_method': 'PUT', @@ -1112,7 +1126,9 @@ def __update_cron_workflow( self.update_cron_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1CronWorkflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/cron-workflows/{namespace}/{name}', 'operation_id': 'update_cron_workflow', 'http_method': 'PUT', diff --git a/sdks/python/client/argo_workflows/api/event_service_api.py 
b/sdks/python/client/argo_workflows/api/event_service_api.py index 0741b39af814..4cebb884b9f4 100644 --- a/sdks/python/client/argo_workflows/api/event_service_api.py +++ b/sdks/python/client/argo_workflows/api/event_service_api.py @@ -114,7 +114,9 @@ def __list_workflow_event_bindings( self.list_workflow_event_bindings = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1WorkflowEventBindingList,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflow-event-bindings/{namespace}', 'operation_id': 'list_workflow_event_bindings', 'http_method': 'GET', @@ -283,7 +285,9 @@ def __receive_event( self.receive_event = _Endpoint( settings={ 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/events/{namespace}/{discriminator}', 'operation_id': 'receive_event', 'http_method': 'POST', diff --git a/sdks/python/client/argo_workflows/api/event_source_service_api.py b/sdks/python/client/argo_workflows/api/event_source_service_api.py index 10843cfd517d..ffae94cdaf27 100644 --- a/sdks/python/client/argo_workflows/api/event_source_service_api.py +++ b/sdks/python/client/argo_workflows/api/event_source_service_api.py @@ -114,7 +114,9 @@ def __create_event_source( self.create_event_source = _Endpoint( settings={ 'response_type': (IoArgoprojEventsV1alpha1EventSource,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/event-sources/{namespace}', 'operation_id': 'create_event_source', 'http_method': 'POST', @@ -247,7 +249,9 @@ def __delete_event_source( self.delete_event_source = _Endpoint( settings={ 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/event-sources/{namespace}/{name}', 'operation_id': 'delete_event_source', 'http_method': 'DELETE', @@ -414,7 +418,9 @@ def __event_sources_logs( self.event_sources_logs = 
_Endpoint( settings={ 'response_type': (StreamResultOfEventsourceLogEntry,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/stream/event-sources/{namespace}/logs', 'operation_id': 'event_sources_logs', 'http_method': 'GET', @@ -604,7 +610,9 @@ def __get_event_source( self.get_event_source = _Endpoint( settings={ 'response_type': (IoArgoprojEventsV1alpha1EventSource,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/event-sources/{namespace}/{name}', 'operation_id': 'get_event_source', 'http_method': 'GET', @@ -735,7 +743,9 @@ def __list_event_sources( self.list_event_sources = _Endpoint( settings={ 'response_type': (IoArgoprojEventsV1alpha1EventSourceList,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/event-sources/{namespace}', 'operation_id': 'list_event_sources', 'http_method': 'GET', @@ -904,7 +914,9 @@ def __update_event_source( self.update_event_source = _Endpoint( settings={ 'response_type': (IoArgoprojEventsV1alpha1EventSource,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/event-sources/{namespace}/{name}', 'operation_id': 'update_event_source', 'http_method': 'PUT', @@ -1042,7 +1054,9 @@ def __watch_event_sources( self.watch_event_sources = _Endpoint( settings={ 'response_type': (StreamResultOfEventsourceEventSourceWatchEvent,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/stream/event-sources/{namespace}', 'operation_id': 'watch_event_sources', 'http_method': 'GET', diff --git a/sdks/python/client/argo_workflows/api/info_service_api.py b/sdks/python/client/argo_workflows/api/info_service_api.py index 58a28471345f..3c3f33b701dd 100644 --- a/sdks/python/client/argo_workflows/api/info_service_api.py +++ b/sdks/python/client/argo_workflows/api/info_service_api.py @@ -22,6 +22,7 @@ validate_and_convert_types ) from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from 
argo_workflows.model.io_argoproj_workflow_v1alpha1_collect_event_request import IoArgoprojWorkflowV1alpha1CollectEventRequest from argo_workflows.model.io_argoproj_workflow_v1alpha1_get_user_info_response import IoArgoprojWorkflowV1alpha1GetUserInfoResponse from argo_workflows.model.io_argoproj_workflow_v1alpha1_info_response import IoArgoprojWorkflowV1alpha1InfoResponse from argo_workflows.model.io_argoproj_workflow_v1alpha1_version import IoArgoprojWorkflowV1alpha1Version @@ -39,6 +40,125 @@ def __init__(self, api_client=None): api_client = ApiClient() self.api_client = api_client + def __collect_event( + self, + body, + **kwargs + ): + """collect_event # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.collect_event(body, async_req=True) + >>> result = thread.get() + + Args: + body (IoArgoprojWorkflowV1alpha1CollectEventRequest): + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + _host_index (int/None): specifies the index of the server + that we want to use. + Default is read from the configuration. 
+ async_req (bool): execute request asynchronously + + Returns: + bool, date, datetime, dict, float, int, list, str, none_type + If the method is called asynchronously, returns the request + thread. + """ + kwargs['async_req'] = kwargs.get( + 'async_req', False + ) + kwargs['_return_http_data_only'] = kwargs.get( + '_return_http_data_only', True + ) + kwargs['_preload_content'] = kwargs.get( + '_preload_content', True + ) + kwargs['_request_timeout'] = kwargs.get( + '_request_timeout', None + ) + kwargs['_check_input_type'] = kwargs.get( + '_check_input_type', True + ) + kwargs['_check_return_type'] = kwargs.get( + '_check_return_type', True + ) + kwargs['_host_index'] = kwargs.get('_host_index') + kwargs['body'] = \ + body + return self.call_with_http_info(**kwargs) + + self.collect_event = _Endpoint( + settings={ + 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), + 'auth': [ + 'BearerToken' + ], + 'endpoint_path': '/api/v1/tracking/event', + 'operation_id': 'collect_event', + 'http_method': 'POST', + 'servers': None, + }, + params_map={ + 'all': [ + 'body', + ], + 'required': [ + 'body', + ], + 'nullable': [ + ], + 'enum': [ + ], + 'validation': [ + ] + }, + root_map={ + 'validations': { + }, + 'allowed_values': { + }, + 'openapi_types': { + 'body': + (IoArgoprojWorkflowV1alpha1CollectEventRequest,), + }, + 'attribute_map': { + }, + 'location_map': { + 'body': 'body', + }, + 'collection_format_map': { + } + }, + headers_map={ + 'accept': [ + 'application/json' + ], + 'content_type': [ + 'application/json' + ] + }, + api_client=api_client, + callable=__collect_event + ) + def __get_info( self, **kwargs @@ -102,7 +222,9 @@ def __get_info( self.get_info = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1InfoResponse,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/info', 'operation_id': 'get_info', 'http_method': 'GET', @@ -206,7 +328,9 @@ def __get_user_info( self.get_user_info = 
_Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1GetUserInfoResponse,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/userinfo', 'operation_id': 'get_user_info', 'http_method': 'GET', @@ -310,7 +434,9 @@ def __get_version( self.get_version = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1Version,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/version', 'operation_id': 'get_version', 'http_method': 'GET', diff --git a/sdks/python/client/argo_workflows/api/pipeline_service_api.py b/sdks/python/client/argo_workflows/api/pipeline_service_api.py deleted file mode 100644 index bba574393452..000000000000 --- a/sdks/python/client/argo_workflows/api/pipeline_service_api.py +++ /dev/null @@ -1,1167 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.api_client import ApiClient, Endpoint as _Endpoint -from argo_workflows.model_utils import ( # noqa: F401 - check_allowed_values, - check_validations, - date, - datetime, - file_type, - none_type, - validate_and_convert_types -) -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline import GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline_list import GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.stream_result_of_pipeline_log_entry import StreamResultOfPipelineLogEntry -from argo_workflows.model.stream_result_of_pipeline_pipeline_watch_event 
import StreamResultOfPipelinePipelineWatchEvent -from argo_workflows.model.stream_result_of_pipeline_step_watch_event import StreamResultOfPipelineStepWatchEvent - - -class PipelineServiceApi(object): - """NOTE: This class is auto generated by OpenAPI Generator - Ref: https://openapi-generator.tech - - Do not edit the class manually. - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def __delete_pipeline( - self, - namespace, - name, - **kwargs - ): - """delete_pipeline # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.delete_pipeline(namespace, name, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - - Keyword Args: - delete_options_grace_period_seconds (str): The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.. [optional] - delete_options_preconditions_uid (str): Specifies the target UID. +optional.. [optional] - delete_options_preconditions_resource_version (str): Specifies the target ResourceVersion +optional.. [optional] - delete_options_orphan_dependents (bool): Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.. [optional] - delete_options_propagation_policy (str): Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. 
The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.. [optional] - delete_options_dry_run ([str]): When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - bool, date, datetime, dict, float, int, list, str, none_type - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - return self.call_with_http_info(**kwargs) - - self.delete_pipeline = _Endpoint( - settings={ - 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [], - 'endpoint_path': '/api/v1/pipelines/{namespace}/{name}', - 'operation_id': 'delete_pipeline', - 'http_method': 'DELETE', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'delete_options_grace_period_seconds', - 'delete_options_preconditions_uid', - 'delete_options_preconditions_resource_version', - 'delete_options_orphan_dependents', - 'delete_options_propagation_policy', - 'delete_options_dry_run', - ], - 'required': [ - 'namespace', - 'name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'delete_options_grace_period_seconds': - (str,), - 'delete_options_preconditions_uid': - (str,), - 'delete_options_preconditions_resource_version': - (str,), - 'delete_options_orphan_dependents': - (bool,), - 'delete_options_propagation_policy': - (str,), - 'delete_options_dry_run': - ([str],), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'delete_options_grace_period_seconds': 'deleteOptions.gracePeriodSeconds', - 'delete_options_preconditions_uid': 'deleteOptions.preconditions.uid', - 
'delete_options_preconditions_resource_version': 'deleteOptions.preconditions.resourceVersion', - 'delete_options_orphan_dependents': 'deleteOptions.orphanDependents', - 'delete_options_propagation_policy': 'deleteOptions.propagationPolicy', - 'delete_options_dry_run': 'deleteOptions.dryRun', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'delete_options_grace_period_seconds': 'query', - 'delete_options_preconditions_uid': 'query', - 'delete_options_preconditions_resource_version': 'query', - 'delete_options_orphan_dependents': 'query', - 'delete_options_propagation_policy': 'query', - 'delete_options_dry_run': 'query', - }, - 'collection_format_map': { - 'delete_options_dry_run': 'multi', - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client, - callable=__delete_pipeline - ) - - def __get_pipeline( - self, - namespace, - name, - **kwargs - ): - """get_pipeline # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_pipeline(namespace, name, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - - Keyword Args: - get_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. 
- _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - return self.call_with_http_info(**kwargs) - - self.get_pipeline = _Endpoint( - settings={ - 'response_type': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline,), - 'auth': [], - 'endpoint_path': '/api/v1/pipelines/{namespace}/{name}', - 'operation_id': 'get_pipeline', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'get_options_resource_version', - ], - 'required': [ - 'namespace', - 'name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'get_options_resource_version': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'get_options_resource_version': 'getOptions.resourceVersion', - }, - 
'location_map': { - 'namespace': 'path', - 'name': 'path', - 'get_options_resource_version': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client, - callable=__get_pipeline - ) - - def __list_pipelines( - self, - namespace, - **kwargs - ): - """list_pipelines # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_pipelines(namespace, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - - Keyword Args: - list_options_label_selector (str): A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.. [optional] - list_options_field_selector (str): A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.. [optional] - list_options_watch (bool): Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.. [optional] - list_options_allow_watch_bookmarks (bool): allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.. [optional] - list_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_resource_version_match (str): resourceVersionMatch determines how resourceVersion is applied to list calls. 
It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_timeout_seconds (str): Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.. [optional] - list_options_limit (str): limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.. [optional] - list_options_continue (str): The continue option should be set when retrieving more results from the server. 
Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. 
- Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - return self.call_with_http_info(**kwargs) - - self.list_pipelines = _Endpoint( - settings={ - 'response_type': (GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList,), - 'auth': [], - 'endpoint_path': '/api/v1/pipelines/{namespace}', - 'operation_id': 'list_pipelines', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'list_options_label_selector', - 'list_options_field_selector', - 'list_options_watch', - 'list_options_allow_watch_bookmarks', - 'list_options_resource_version', - 'list_options_resource_version_match', - 'list_options_timeout_seconds', - 'list_options_limit', - 'list_options_continue', - ], - 'required': [ - 'namespace', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'list_options_label_selector': - (str,), - 'list_options_field_selector': - (str,), - 'list_options_watch': - (bool,), - 'list_options_allow_watch_bookmarks': - (bool,), - 'list_options_resource_version': - (str,), - 'list_options_resource_version_match': - (str,), - 'list_options_timeout_seconds': - (str,), - 'list_options_limit': - (str,), 
- 'list_options_continue': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'list_options_label_selector': 'listOptions.labelSelector', - 'list_options_field_selector': 'listOptions.fieldSelector', - 'list_options_watch': 'listOptions.watch', - 'list_options_allow_watch_bookmarks': 'listOptions.allowWatchBookmarks', - 'list_options_resource_version': 'listOptions.resourceVersion', - 'list_options_resource_version_match': 'listOptions.resourceVersionMatch', - 'list_options_timeout_seconds': 'listOptions.timeoutSeconds', - 'list_options_limit': 'listOptions.limit', - 'list_options_continue': 'listOptions.continue', - }, - 'location_map': { - 'namespace': 'path', - 'list_options_label_selector': 'query', - 'list_options_field_selector': 'query', - 'list_options_watch': 'query', - 'list_options_allow_watch_bookmarks': 'query', - 'list_options_resource_version': 'query', - 'list_options_resource_version_match': 'query', - 'list_options_timeout_seconds': 'query', - 'list_options_limit': 'query', - 'list_options_continue': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client, - callable=__list_pipelines - ) - - def __pipeline_logs( - self, - namespace, - **kwargs - ): - """pipeline_logs # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.pipeline_logs(namespace, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - - Keyword Args: - name (str): optional - only return entries for this pipeline.. [optional] - step_name (str): optional - only return entries for this step.. [optional] - grep (str): optional - only return entries which match this expresssion.. [optional] - pod_log_options_container (str): The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional.. 
[optional] - pod_log_options_follow (bool): Follow the log stream of the pod. Defaults to false. +optional.. [optional] - pod_log_options_previous (bool): Return previous terminated container logs. Defaults to false. +optional.. [optional] - pod_log_options_since_seconds (str): A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional.. [optional] - pod_log_options_since_time_seconds (str): Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.. [optional] - pod_log_options_since_time_nanos (int): Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.. [optional] - pod_log_options_timestamps (bool): If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.. [optional] - pod_log_options_tail_lines (str): If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.. [optional] - pod_log_options_limit_bytes (str): If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.. [optional] - pod_log_options_insecure_skip_tls_verify_backend (bool): insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. 
This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - StreamResultOfPipelineLogEntry - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - return self.call_with_http_info(**kwargs) - - self.pipeline_logs = _Endpoint( - settings={ - 'response_type': (StreamResultOfPipelineLogEntry,), - 'auth': [], - 'endpoint_path': '/api/v1/stream/pipelines/{namespace}/logs', - 'operation_id': 'pipeline_logs', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'step_name', - 'grep', - 'pod_log_options_container', - 'pod_log_options_follow', - 'pod_log_options_previous', - 'pod_log_options_since_seconds', - 'pod_log_options_since_time_seconds', - 'pod_log_options_since_time_nanos', - 'pod_log_options_timestamps', - 'pod_log_options_tail_lines', - 'pod_log_options_limit_bytes', - 'pod_log_options_insecure_skip_tls_verify_backend', - ], - 'required': [ - 'namespace', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'step_name': - (str,), - 'grep': - (str,), - 'pod_log_options_container': - (str,), - 'pod_log_options_follow': - (bool,), - 'pod_log_options_previous': - (bool,), - 'pod_log_options_since_seconds': - (str,), - 'pod_log_options_since_time_seconds': - (str,), - 'pod_log_options_since_time_nanos': - (int,), - 'pod_log_options_timestamps': - (bool,), - 'pod_log_options_tail_lines': - (str,), - 'pod_log_options_limit_bytes': - (str,), - 
'pod_log_options_insecure_skip_tls_verify_backend': - (bool,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'step_name': 'stepName', - 'grep': 'grep', - 'pod_log_options_container': 'podLogOptions.container', - 'pod_log_options_follow': 'podLogOptions.follow', - 'pod_log_options_previous': 'podLogOptions.previous', - 'pod_log_options_since_seconds': 'podLogOptions.sinceSeconds', - 'pod_log_options_since_time_seconds': 'podLogOptions.sinceTime.seconds', - 'pod_log_options_since_time_nanos': 'podLogOptions.sinceTime.nanos', - 'pod_log_options_timestamps': 'podLogOptions.timestamps', - 'pod_log_options_tail_lines': 'podLogOptions.tailLines', - 'pod_log_options_limit_bytes': 'podLogOptions.limitBytes', - 'pod_log_options_insecure_skip_tls_verify_backend': 'podLogOptions.insecureSkipTLSVerifyBackend', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'query', - 'step_name': 'query', - 'grep': 'query', - 'pod_log_options_container': 'query', - 'pod_log_options_follow': 'query', - 'pod_log_options_previous': 'query', - 'pod_log_options_since_seconds': 'query', - 'pod_log_options_since_time_seconds': 'query', - 'pod_log_options_since_time_nanos': 'query', - 'pod_log_options_timestamps': 'query', - 'pod_log_options_tail_lines': 'query', - 'pod_log_options_limit_bytes': 'query', - 'pod_log_options_insecure_skip_tls_verify_backend': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client, - callable=__pipeline_logs - ) - - def __restart_pipeline( - self, - namespace, - name, - **kwargs - ): - """restart_pipeline # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.restart_pipeline(namespace, name, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - bool, date, datetime, dict, float, int, list, str, none_type - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - return self.call_with_http_info(**kwargs) - - self.restart_pipeline = _Endpoint( - settings={ - 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [], - 'endpoint_path': '/api/v1/pipelines/{namespace}/{name}/restart', - 'operation_id': 'restart_pipeline', - 'http_method': 'POST', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - ], - 'required': [ - 'namespace', - 'name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client, - callable=__restart_pipeline - ) - - def __watch_pipelines( - self, - namespace, - **kwargs - ): - """watch_pipelines # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.watch_pipelines(namespace, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - - Keyword Args: - list_options_label_selector (str): A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.. [optional] - list_options_field_selector (str): A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.. [optional] - list_options_watch (bool): Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.. [optional] - list_options_allow_watch_bookmarks (bool): allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.. [optional] - list_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_resource_version_match (str): resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_timeout_seconds (str): Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.. 
[optional] - list_options_limit (str): limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.. [optional] - list_options_continue (str): The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. 
If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - StreamResultOfPipelinePipelineWatchEvent - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - return self.call_with_http_info(**kwargs) - - self.watch_pipelines = _Endpoint( - settings={ - 'response_type': (StreamResultOfPipelinePipelineWatchEvent,), - 'auth': [], - 'endpoint_path': '/api/v1/stream/pipelines/{namespace}', - 'operation_id': 'watch_pipelines', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'list_options_label_selector', - 'list_options_field_selector', - 'list_options_watch', - 'list_options_allow_watch_bookmarks', - 'list_options_resource_version', - 'list_options_resource_version_match', - 'list_options_timeout_seconds', - 'list_options_limit', - 'list_options_continue', - ], - 'required': [ - 'namespace', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'list_options_label_selector': - (str,), - 'list_options_field_selector': - (str,), - 'list_options_watch': - (bool,), - 'list_options_allow_watch_bookmarks': - (bool,), - 'list_options_resource_version': - (str,), - 'list_options_resource_version_match': - (str,), - 'list_options_timeout_seconds': - (str,), - 'list_options_limit': - (str,), - 'list_options_continue': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'list_options_label_selector': 'listOptions.labelSelector', - 'list_options_field_selector': 'listOptions.fieldSelector', - 'list_options_watch': 
'listOptions.watch', - 'list_options_allow_watch_bookmarks': 'listOptions.allowWatchBookmarks', - 'list_options_resource_version': 'listOptions.resourceVersion', - 'list_options_resource_version_match': 'listOptions.resourceVersionMatch', - 'list_options_timeout_seconds': 'listOptions.timeoutSeconds', - 'list_options_limit': 'listOptions.limit', - 'list_options_continue': 'listOptions.continue', - }, - 'location_map': { - 'namespace': 'path', - 'list_options_label_selector': 'query', - 'list_options_field_selector': 'query', - 'list_options_watch': 'query', - 'list_options_allow_watch_bookmarks': 'query', - 'list_options_resource_version': 'query', - 'list_options_resource_version_match': 'query', - 'list_options_timeout_seconds': 'query', - 'list_options_limit': 'query', - 'list_options_continue': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client, - callable=__watch_pipelines - ) - - def __watch_steps( - self, - namespace, - **kwargs - ): - """watch_steps # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.watch_steps(namespace, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - - Keyword Args: - list_options_label_selector (str): A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.. [optional] - list_options_field_selector (str): A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.. [optional] - list_options_watch (bool): Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.. [optional] - list_options_allow_watch_bookmarks (bool): allowWatchBookmarks requests watch events with type \"BOOKMARK\". 
Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.. [optional] - list_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_resource_version_match (str): resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_timeout_seconds (str): Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.. [optional] - list_options_limit (str): limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.. [optional] - list_options_continue (str): The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. 
- _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - StreamResultOfPipelineStepWatchEvent - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - return self.call_with_http_info(**kwargs) - - self.watch_steps = _Endpoint( - settings={ - 'response_type': (StreamResultOfPipelineStepWatchEvent,), - 'auth': [], - 'endpoint_path': '/api/v1/stream/steps/{namespace}', - 'operation_id': 'watch_steps', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'list_options_label_selector', - 'list_options_field_selector', - 'list_options_watch', - 'list_options_allow_watch_bookmarks', - 'list_options_resource_version', - 
'list_options_resource_version_match', - 'list_options_timeout_seconds', - 'list_options_limit', - 'list_options_continue', - ], - 'required': [ - 'namespace', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'list_options_label_selector': - (str,), - 'list_options_field_selector': - (str,), - 'list_options_watch': - (bool,), - 'list_options_allow_watch_bookmarks': - (bool,), - 'list_options_resource_version': - (str,), - 'list_options_resource_version_match': - (str,), - 'list_options_timeout_seconds': - (str,), - 'list_options_limit': - (str,), - 'list_options_continue': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'list_options_label_selector': 'listOptions.labelSelector', - 'list_options_field_selector': 'listOptions.fieldSelector', - 'list_options_watch': 'listOptions.watch', - 'list_options_allow_watch_bookmarks': 'listOptions.allowWatchBookmarks', - 'list_options_resource_version': 'listOptions.resourceVersion', - 'list_options_resource_version_match': 'listOptions.resourceVersionMatch', - 'list_options_timeout_seconds': 'listOptions.timeoutSeconds', - 'list_options_limit': 'listOptions.limit', - 'list_options_continue': 'listOptions.continue', - }, - 'location_map': { - 'namespace': 'path', - 'list_options_label_selector': 'query', - 'list_options_field_selector': 'query', - 'list_options_watch': 'query', - 'list_options_allow_watch_bookmarks': 'query', - 'list_options_resource_version': 'query', - 'list_options_resource_version_match': 'query', - 'list_options_timeout_seconds': 'query', - 'list_options_limit': 'query', - 'list_options_continue': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client, - callable=__watch_steps - ) diff --git a/sdks/python/client/argo_workflows/api/sensor_service_api.py 
b/sdks/python/client/argo_workflows/api/sensor_service_api.py index 37d825447428..a7f18512ecf4 100644 --- a/sdks/python/client/argo_workflows/api/sensor_service_api.py +++ b/sdks/python/client/argo_workflows/api/sensor_service_api.py @@ -114,7 +114,9 @@ def __create_sensor( self.create_sensor = _Endpoint( settings={ 'response_type': (IoArgoprojEventsV1alpha1Sensor,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/sensors/{namespace}', 'operation_id': 'create_sensor', 'http_method': 'POST', @@ -247,7 +249,9 @@ def __delete_sensor( self.delete_sensor = _Endpoint( settings={ 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/sensors/{namespace}/{name}', 'operation_id': 'delete_sensor', 'http_method': 'DELETE', @@ -405,7 +409,9 @@ def __get_sensor( self.get_sensor = _Endpoint( settings={ 'response_type': (IoArgoprojEventsV1alpha1Sensor,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/sensors/{namespace}/{name}', 'operation_id': 'get_sensor', 'http_method': 'GET', @@ -541,7 +547,9 @@ def __list_sensors( self.list_sensors = _Endpoint( settings={ 'response_type': (IoArgoprojEventsV1alpha1SensorList,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/sensors/{namespace}', 'operation_id': 'list_sensors', 'http_method': 'GET', @@ -715,7 +723,9 @@ def __sensors_logs( self.sensors_logs = _Endpoint( settings={ 'response_type': (StreamResultOfSensorLogEntry,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/stream/sensors/{namespace}/logs', 'operation_id': 'sensors_logs', 'http_method': 'GET', @@ -904,7 +914,9 @@ def __update_sensor( self.update_sensor = _Endpoint( settings={ 'response_type': (IoArgoprojEventsV1alpha1Sensor,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/sensors/{namespace}/{name}', 'operation_id': 'update_sensor', 'http_method': 
'PUT', @@ -1042,7 +1054,9 @@ def __watch_sensors( self.watch_sensors = _Endpoint( settings={ 'response_type': (StreamResultOfSensorSensorWatchEvent,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/stream/sensors/{namespace}', 'operation_id': 'watch_sensors', 'http_method': 'GET', diff --git a/sdks/python/client/argo_workflows/api/workflow_service_api.py b/sdks/python/client/argo_workflows/api/workflow_service_api.py index e65bcd715247..3e629bf0740e 100644 --- a/sdks/python/client/argo_workflows/api/workflow_service_api.py +++ b/sdks/python/client/argo_workflows/api/workflow_service_api.py @@ -123,7 +123,9 @@ def __create_workflow( self.create_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflows/{namespace}', 'operation_id': 'create_workflow', 'http_method': 'POST', @@ -203,6 +205,7 @@ def __delete_workflow( delete_options_orphan_dependents (bool): Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.. [optional] delete_options_propagation_policy (str): Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.. [optional] delete_options_dry_run ([str]): When present, indicates that modifications should not be persisted. 
An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional.. [optional] + force (bool): [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object @@ -256,7 +259,9 @@ def __delete_workflow( self.delete_workflow = _Endpoint( settings={ 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflows/{namespace}/{name}', 'operation_id': 'delete_workflow', 'http_method': 'DELETE', @@ -272,6 +277,7 @@ def __delete_workflow( 'delete_options_orphan_dependents', 'delete_options_propagation_policy', 'delete_options_dry_run', + 'force', ], 'required': [ 'namespace', @@ -306,6 +312,8 @@ def __delete_workflow( (str,), 'delete_options_dry_run': ([str],), + 'force': + (bool,), }, 'attribute_map': { 'namespace': 'namespace', @@ -316,6 +324,7 @@ def __delete_workflow( 'delete_options_orphan_dependents': 'deleteOptions.orphanDependents', 'delete_options_propagation_policy': 'deleteOptions.propagationPolicy', 'delete_options_dry_run': 'deleteOptions.dryRun', + 'force': 'force', }, 'location_map': { 'namespace': 'path', @@ -326,6 +335,7 @@ def __delete_workflow( 'delete_options_orphan_dependents': 'query', 'delete_options_propagation_policy': 'query', 'delete_options_dry_run': 'query', + 'force': 'query', }, 'collection_format_map': { 'delete_options_dry_run': 'multi', @@ -415,7 +425,9 @@ def __get_workflow( self.get_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflows/{namespace}/{name}', 'operation_id': 'get_workflow', 'http_method': 'GET', @@ -551,7 +563,9 @@ def __lint_workflow( self.lint_workflow = _Endpoint( 
settings={ 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflows/{namespace}/lint', 'operation_id': 'lint_workflow', 'http_method': 'POST', @@ -684,7 +698,9 @@ def __list_workflows( self.list_workflows = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1WorkflowList,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflows/{namespace}', 'operation_id': 'list_workflows', 'http_method': 'GET', @@ -870,7 +886,9 @@ def __pod_logs( self.pod_logs = _Endpoint( settings={ 'response_type': (StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflows/{namespace}/{name}/{podName}/log', 'operation_id': 'pod_logs', 'http_method': 'GET', @@ -1066,7 +1084,9 @@ def __resubmit_workflow( self.resubmit_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflows/{namespace}/{name}/resubmit', 'operation_id': 'resubmit_workflow', 'http_method': 'PUT', @@ -1203,7 +1223,9 @@ def __resume_workflow( self.resume_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflows/{namespace}/{name}/resume', 'operation_id': 'resume_workflow', 'http_method': 'PUT', @@ -1340,7 +1362,9 @@ def __retry_workflow( self.retry_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflows/{namespace}/{name}/retry', 'operation_id': 'retry_workflow', 'http_method': 'PUT', @@ -1477,7 +1501,9 @@ def __set_workflow( self.set_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': 
'/api/v1/workflows/{namespace}/{name}/set', 'operation_id': 'set_workflow', 'http_method': 'PUT', @@ -1614,7 +1640,9 @@ def __stop_workflow( self.stop_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflows/{namespace}/{name}/stop', 'operation_id': 'stop_workflow', 'http_method': 'PUT', @@ -1747,7 +1775,9 @@ def __submit_workflow( self.submit_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflows/{namespace}/submit', 'operation_id': 'submit_workflow', 'http_method': 'POST', @@ -1878,7 +1908,9 @@ def __suspend_workflow( self.suspend_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflows/{namespace}/{name}/suspend', 'operation_id': 'suspend_workflow', 'http_method': 'PUT', @@ -2015,7 +2047,9 @@ def __terminate_workflow( self.terminate_workflow = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflows/{namespace}/{name}/terminate', 'operation_id': 'terminate_workflow', 'http_method': 'PUT', @@ -2153,7 +2187,9 @@ def __watch_events( self.watch_events = _Endpoint( settings={ 'response_type': (StreamResultOfEvent,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/stream/events/{namespace}', 'operation_id': 'watch_events', 'http_method': 'GET', @@ -2324,7 +2360,9 @@ def __watch_workflows( self.watch_workflows = _Endpoint( settings={ 'response_type': (StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflow-events/{namespace}', 'operation_id': 'watch_workflows', 'http_method': 'GET', @@ -2507,7 +2545,9 @@ def 
__workflow_logs( self.workflow_logs = _Endpoint( settings={ 'response_type': (StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflows/{namespace}/{name}/log', 'operation_id': 'workflow_logs', 'http_method': 'GET', diff --git a/sdks/python/client/argo_workflows/api/workflow_template_service_api.py b/sdks/python/client/argo_workflows/api/workflow_template_service_api.py index cb924e31a256..6e502b91a1e0 100644 --- a/sdks/python/client/argo_workflows/api/workflow_template_service_api.py +++ b/sdks/python/client/argo_workflows/api/workflow_template_service_api.py @@ -113,7 +113,9 @@ def __create_workflow_template( self.create_workflow_template = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1WorkflowTemplate,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflow-templates/{namespace}', 'operation_id': 'create_workflow_template', 'http_method': 'POST', @@ -246,7 +248,9 @@ def __delete_workflow_template( self.delete_workflow_template = _Endpoint( settings={ 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflow-templates/{namespace}/{name}', 'operation_id': 'delete_workflow_template', 'http_method': 'DELETE', @@ -404,7 +408,9 @@ def __get_workflow_template( self.get_workflow_template = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1WorkflowTemplate,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflow-templates/{namespace}/{name}', 'operation_id': 'get_workflow_template', 'http_method': 'GET', @@ -535,7 +541,9 @@ def __lint_workflow_template( self.lint_workflow_template = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1WorkflowTemplate,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflow-templates/{namespace}/lint', 'operation_id': 
'lint_workflow_template', 'http_method': 'POST', @@ -667,7 +675,9 @@ def __list_workflow_templates( self.list_workflow_templates = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1WorkflowTemplateList,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflow-templates/{namespace}', 'operation_id': 'list_workflow_templates', 'http_method': 'GET', @@ -836,7 +846,9 @@ def __update_workflow_template( self.update_workflow_template = _Endpoint( settings={ 'response_type': (IoArgoprojWorkflowV1alpha1WorkflowTemplate,), - 'auth': [], + 'auth': [ + 'BearerToken' + ], 'endpoint_path': '/api/v1/workflow-templates/{namespace}/{name}', 'operation_id': 'update_workflow_template', 'http_method': 'PUT', diff --git a/sdks/python/client/argo_workflows/apis/__init__.py b/sdks/python/client/argo_workflows/apis/__init__.py index c7227d44d923..bbd2e429fa68 100644 --- a/sdks/python/client/argo_workflows/apis/__init__.py +++ b/sdks/python/client/argo_workflows/apis/__init__.py @@ -21,7 +21,6 @@ from argo_workflows.api.event_service_api import EventServiceApi from argo_workflows.api.event_source_service_api import EventSourceServiceApi from argo_workflows.api.info_service_api import InfoServiceApi -from argo_workflows.api.pipeline_service_api import PipelineServiceApi from argo_workflows.api.sensor_service_api import SensorServiceApi from argo_workflows.api.workflow_service_api import WorkflowServiceApi from argo_workflows.api.workflow_template_service_api import WorkflowTemplateServiceApi diff --git a/sdks/python/client/argo_workflows/configuration.py b/sdks/python/client/argo_workflows/configuration.py index 1a0edab798ba..a6a7001ea5f2 100644 --- a/sdks/python/client/argo_workflows/configuration.py +++ b/sdks/python/client/argo_workflows/configuration.py @@ -96,22 +96,6 @@ class Configuration(object): The following cookie will be added to the HTTP request: Cookie: JSESSIONID abc123 - - HTTP Basic Authentication Example. 
- Given the following security scheme in the OpenAPI specification: - components: - securitySchemes: - http_basic_auth: - type: http - scheme: basic - - Configure API client with HTTP basic authentication: - -conf = argo_workflows.Configuration( - username='the-user', - password='the-password', -) - """ _default = None @@ -406,18 +390,11 @@ def auth_settings(self): auth['BearerToken'] = { 'type': 'api_key', 'in': 'header', - 'key': 'authorization', + 'key': 'Authorization', 'value': self.get_api_key_with_prefix( 'BearerToken', ), } - if self.username is not None and self.password is not None: - auth['HTTPBasic'] = { - 'type': 'basic', - 'in': 'header', - 'key': 'Authorization', - 'value': self.get_basic_auth_token() - } return auth def to_debug_report(self): diff --git a/sdks/python/client/argo_workflows/model/condition.py b/sdks/python/client/argo_workflows/model/condition.py deleted file mode 100644 index c841d4a8a15a..000000000000 --- a/sdks/python/client/argo_workflows/model/condition.py +++ /dev/null @@ -1,289 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - - -class Condition(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - return { - 'last_transition_time': (datetime,), # noqa: E501 - 'message': (str,), # noqa: E501 - 'reason': (str,), # noqa: E501 - 'status': (str,), # noqa: E501 - 'type': (str,), # noqa: E501 - 'observed_generation': (int,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'last_transition_time': 'lastTransitionTime', # noqa: E501 - 'message': 'message', # noqa: E501 - 'reason': 'reason', # noqa: E501 - 'status': 'status', # noqa: E501 - 'type': 'type', # noqa: E501 - 'observed_generation': 'observedGeneration', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, last_transition_time, message, reason, status, type, *args, **kwargs): # noqa: E501 - """Condition - a model defined in OpenAPI - - Args: - last_transition_time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. - message (str): message is a human readable message indicating details about the transition. This may be an empty string. - reason (str): reason contains a programmatic identifier indicating the reason for the condition's last transition. Producers of specific condition types may define expected values and meanings for this field, and whether the values are considered a guaranteed API. The value should be a CamelCase string. This field may not be empty. - status (str): status of the condition, one of True, False, Unknown. - type (str): type of condition in CamelCase or in foo.example.com/CamelCase. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - observed_generation (int): observedGeneration represents the .metadata.generation that the condition was set based upon. For instance, if .metadata.generation is currently 12, but the .status.conditions[x].observedGeneration is 9, the condition is out of date with respect to the current state of the instance.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.last_transition_time = last_transition_time - self.message = message - self.reason = reason - self.status = status - self.type = type - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, last_transition_time, message, reason, status, type, *args, **kwargs): # noqa: E501 - """Condition - a model defined in OpenAPI - - Args: - last_transition_time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. - message (str): message is a human readable message indicating details about the transition. 
This may be an empty string. - reason (str): reason contains a programmatic identifier indicating the reason for the condition's last transition. Producers of specific condition types may define expected values and meanings for this field, and whether the values are considered a guaranteed API. The value should be a CamelCase string. This field may not be empty. - status (str): status of the condition, one of True, False, Unknown. - type (str): type of condition in CamelCase or in foo.example.com/CamelCase. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - observed_generation (int): observedGeneration represents the .metadata.generation that the condition was set based upon. For instance, if .metadata.generation is currently 12, but the .status.conditions[x].observedGeneration is 9, the condition is out of date with respect to the current state of the instance.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.last_transition_time = last_transition_time - self.message = message - self.reason = reason - self.status = status - self.type = type - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_abstract_volume_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_abstract_volume_source.py deleted file mode 100644 index f72a0a6137a9..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_abstract_volume_source.py +++ /dev/null @@ -1,429 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.aws_elastic_block_store_volume_source import AWSElasticBlockStoreVolumeSource - from argo_workflows.model.azure_disk_volume_source import AzureDiskVolumeSource - from argo_workflows.model.azure_file_volume_source import AzureFileVolumeSource - from argo_workflows.model.ceph_fs_volume_source import CephFSVolumeSource - from argo_workflows.model.cinder_volume_source import CinderVolumeSource - from argo_workflows.model.config_map_volume_source import ConfigMapVolumeSource - from argo_workflows.model.csi_volume_source import CSIVolumeSource - from argo_workflows.model.downward_api_volume_source import DownwardAPIVolumeSource - from 
argo_workflows.model.empty_dir_volume_source import EmptyDirVolumeSource - from argo_workflows.model.ephemeral_volume_source import EphemeralVolumeSource - from argo_workflows.model.fc_volume_source import FCVolumeSource - from argo_workflows.model.flex_volume_source import FlexVolumeSource - from argo_workflows.model.flocker_volume_source import FlockerVolumeSource - from argo_workflows.model.gce_persistent_disk_volume_source import GCEPersistentDiskVolumeSource - from argo_workflows.model.git_repo_volume_source import GitRepoVolumeSource - from argo_workflows.model.glusterfs_volume_source import GlusterfsVolumeSource - from argo_workflows.model.host_path_volume_source import HostPathVolumeSource - from argo_workflows.model.iscsi_volume_source import ISCSIVolumeSource - from argo_workflows.model.nfs_volume_source import NFSVolumeSource - from argo_workflows.model.persistent_volume_claim_volume_source import PersistentVolumeClaimVolumeSource - from argo_workflows.model.photon_persistent_disk_volume_source import PhotonPersistentDiskVolumeSource - from argo_workflows.model.portworx_volume_source import PortworxVolumeSource - from argo_workflows.model.projected_volume_source import ProjectedVolumeSource - from argo_workflows.model.quobyte_volume_source import QuobyteVolumeSource - from argo_workflows.model.rbd_volume_source import RBDVolumeSource - from argo_workflows.model.scale_io_volume_source import ScaleIOVolumeSource - from argo_workflows.model.secret_volume_source import SecretVolumeSource - from argo_workflows.model.storage_os_volume_source import StorageOSVolumeSource - from argo_workflows.model.vsphere_virtual_disk_volume_source import VsphereVirtualDiskVolumeSource - globals()['AWSElasticBlockStoreVolumeSource'] = AWSElasticBlockStoreVolumeSource - globals()['AzureDiskVolumeSource'] = AzureDiskVolumeSource - globals()['AzureFileVolumeSource'] = AzureFileVolumeSource - globals()['CSIVolumeSource'] = CSIVolumeSource - globals()['CephFSVolumeSource'] = 
CephFSVolumeSource - globals()['CinderVolumeSource'] = CinderVolumeSource - globals()['ConfigMapVolumeSource'] = ConfigMapVolumeSource - globals()['DownwardAPIVolumeSource'] = DownwardAPIVolumeSource - globals()['EmptyDirVolumeSource'] = EmptyDirVolumeSource - globals()['EphemeralVolumeSource'] = EphemeralVolumeSource - globals()['FCVolumeSource'] = FCVolumeSource - globals()['FlexVolumeSource'] = FlexVolumeSource - globals()['FlockerVolumeSource'] = FlockerVolumeSource - globals()['GCEPersistentDiskVolumeSource'] = GCEPersistentDiskVolumeSource - globals()['GitRepoVolumeSource'] = GitRepoVolumeSource - globals()['GlusterfsVolumeSource'] = GlusterfsVolumeSource - globals()['HostPathVolumeSource'] = HostPathVolumeSource - globals()['ISCSIVolumeSource'] = ISCSIVolumeSource - globals()['NFSVolumeSource'] = NFSVolumeSource - globals()['PersistentVolumeClaimVolumeSource'] = PersistentVolumeClaimVolumeSource - globals()['PhotonPersistentDiskVolumeSource'] = PhotonPersistentDiskVolumeSource - globals()['PortworxVolumeSource'] = PortworxVolumeSource - globals()['ProjectedVolumeSource'] = ProjectedVolumeSource - globals()['QuobyteVolumeSource'] = QuobyteVolumeSource - globals()['RBDVolumeSource'] = RBDVolumeSource - globals()['ScaleIOVolumeSource'] = ScaleIOVolumeSource - globals()['SecretVolumeSource'] = SecretVolumeSource - globals()['StorageOSVolumeSource'] = StorageOSVolumeSource - globals()['VsphereVirtualDiskVolumeSource'] = VsphereVirtualDiskVolumeSource - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'aws_elastic_block_store': (AWSElasticBlockStoreVolumeSource,), # noqa: E501 - 'azure_disk': (AzureDiskVolumeSource,), # noqa: E501 - 'azure_file': (AzureFileVolumeSource,), # noqa: E501 - 'cephfs': (CephFSVolumeSource,), # noqa: E501 - 'cinder': (CinderVolumeSource,), # noqa: E501 - 'config_map': (ConfigMapVolumeSource,), # noqa: E501 - 'csi': (CSIVolumeSource,), # noqa: E501 - 'downward_api': (DownwardAPIVolumeSource,), # noqa: E501 - 'empty_dir': (EmptyDirVolumeSource,), # noqa: E501 - 'ephemeral': (EphemeralVolumeSource,), # noqa: E501 - 'fc': (FCVolumeSource,), # noqa: E501 - 'flex_volume': (FlexVolumeSource,), # noqa: E501 - 'flocker': (FlockerVolumeSource,), # noqa: E501 - 'gce_persistent_disk': (GCEPersistentDiskVolumeSource,), # noqa: E501 - 'git_repo': (GitRepoVolumeSource,), # noqa: E501 - 'glusterfs': (GlusterfsVolumeSource,), # noqa: E501 - 'host_path': (HostPathVolumeSource,), # noqa: E501 - 'iscsi': (ISCSIVolumeSource,), # noqa: E501 - 'nfs': (NFSVolumeSource,), # noqa: E501 - 'persistent_volume_claim': (PersistentVolumeClaimVolumeSource,), # noqa: E501 - 'photon_persistent_disk': (PhotonPersistentDiskVolumeSource,), # noqa: E501 - 'portworx_volume': (PortworxVolumeSource,), # noqa: E501 - 'projected': (ProjectedVolumeSource,), # noqa: E501 - 'quobyte': (QuobyteVolumeSource,), # noqa: E501 - 'rbd': (RBDVolumeSource,), # noqa: E501 - 'scale_io': (ScaleIOVolumeSource,), # noqa: E501 - 'secret': (SecretVolumeSource,), # noqa: E501 - 'storageos': (StorageOSVolumeSource,), # noqa: E501 - 'vsphere_volume': (VsphereVirtualDiskVolumeSource,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'aws_elastic_block_store': 'awsElasticBlockStore', # noqa: E501 - 'azure_disk': 'azureDisk', # noqa: E501 - 'azure_file': 'azureFile', # noqa: E501 - 'cephfs': 'cephfs', # noqa: E501 - 'cinder': 'cinder', # noqa: E501 - 'config_map': 'configMap', # noqa: E501 - 'csi': 'csi', # noqa: 
E501 - 'downward_api': 'downwardAPI', # noqa: E501 - 'empty_dir': 'emptyDir', # noqa: E501 - 'ephemeral': 'ephemeral', # noqa: E501 - 'fc': 'fc', # noqa: E501 - 'flex_volume': 'flexVolume', # noqa: E501 - 'flocker': 'flocker', # noqa: E501 - 'gce_persistent_disk': 'gcePersistentDisk', # noqa: E501 - 'git_repo': 'gitRepo', # noqa: E501 - 'glusterfs': 'glusterfs', # noqa: E501 - 'host_path': 'hostPath', # noqa: E501 - 'iscsi': 'iscsi', # noqa: E501 - 'nfs': 'nfs', # noqa: E501 - 'persistent_volume_claim': 'persistentVolumeClaim', # noqa: E501 - 'photon_persistent_disk': 'photonPersistentDisk', # noqa: E501 - 'portworx_volume': 'portworxVolume', # noqa: E501 - 'projected': 'projected', # noqa: E501 - 'quobyte': 'quobyte', # noqa: E501 - 'rbd': 'rbd', # noqa: E501 - 'scale_io': 'scaleIO', # noqa: E501 - 'secret': 'secret', # noqa: E501 - 'storageos': 'storageos', # noqa: E501 - 'vsphere_volume': 'vsphereVolume', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - aws_elastic_block_store (AWSElasticBlockStoreVolumeSource): [optional] # noqa: E501 - azure_disk (AzureDiskVolumeSource): [optional] # noqa: E501 - azure_file (AzureFileVolumeSource): [optional] # noqa: E501 - cephfs (CephFSVolumeSource): [optional] # noqa: E501 - cinder (CinderVolumeSource): [optional] # noqa: E501 - config_map (ConfigMapVolumeSource): [optional] # noqa: E501 - csi (CSIVolumeSource): [optional] # noqa: E501 - downward_api (DownwardAPIVolumeSource): [optional] # noqa: E501 - empty_dir (EmptyDirVolumeSource): [optional] # noqa: E501 - ephemeral (EphemeralVolumeSource): [optional] # noqa: E501 - fc (FCVolumeSource): [optional] # noqa: E501 - flex_volume (FlexVolumeSource): [optional] # noqa: E501 - flocker (FlockerVolumeSource): [optional] # noqa: E501 - gce_persistent_disk (GCEPersistentDiskVolumeSource): [optional] # noqa: E501 - git_repo (GitRepoVolumeSource): [optional] # noqa: E501 - glusterfs (GlusterfsVolumeSource): [optional] # noqa: E501 - host_path (HostPathVolumeSource): [optional] # noqa: E501 - iscsi (ISCSIVolumeSource): [optional] # noqa: E501 - nfs (NFSVolumeSource): [optional] # noqa: E501 - persistent_volume_claim (PersistentVolumeClaimVolumeSource): [optional] # noqa: E501 - photon_persistent_disk (PhotonPersistentDiskVolumeSource): [optional] # noqa: E501 - portworx_volume 
(PortworxVolumeSource): [optional] # noqa: E501 - projected (ProjectedVolumeSource): [optional] # noqa: E501 - quobyte (QuobyteVolumeSource): [optional] # noqa: E501 - rbd (RBDVolumeSource): [optional] # noqa: E501 - scale_io (ScaleIOVolumeSource): [optional] # noqa: E501 - secret (SecretVolumeSource): [optional] # noqa: E501 - storageos (StorageOSVolumeSource): [optional] # noqa: E501 - vsphere_volume (VsphereVirtualDiskVolumeSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - aws_elastic_block_store (AWSElasticBlockStoreVolumeSource): [optional] # noqa: E501 - azure_disk (AzureDiskVolumeSource): [optional] # noqa: E501 - azure_file (AzureFileVolumeSource): [optional] # noqa: E501 - cephfs (CephFSVolumeSource): [optional] # noqa: E501 - cinder (CinderVolumeSource): [optional] # noqa: E501 - config_map (ConfigMapVolumeSource): [optional] # noqa: E501 - csi (CSIVolumeSource): [optional] # noqa: E501 - downward_api (DownwardAPIVolumeSource): [optional] # noqa: E501 - empty_dir (EmptyDirVolumeSource): [optional] # noqa: E501 - ephemeral (EphemeralVolumeSource): [optional] # noqa: E501 - fc (FCVolumeSource): [optional] # noqa: E501 - flex_volume (FlexVolumeSource): [optional] # noqa: E501 - flocker (FlockerVolumeSource): [optional] # noqa: E501 - gce_persistent_disk (GCEPersistentDiskVolumeSource): [optional] # noqa: E501 - git_repo (GitRepoVolumeSource): [optional] # noqa: E501 - glusterfs (GlusterfsVolumeSource): [optional] # noqa: E501 - host_path (HostPathVolumeSource): [optional] # noqa: E501 - iscsi (ISCSIVolumeSource): [optional] # noqa: E501 - nfs (NFSVolumeSource): [optional] # noqa: E501 - persistent_volume_claim (PersistentVolumeClaimVolumeSource): [optional] # noqa: E501 - photon_persistent_disk (PhotonPersistentDiskVolumeSource): [optional] # noqa: E501 - portworx_volume (PortworxVolumeSource): [optional] # noqa: E501 - projected (ProjectedVolumeSource): [optional] # noqa: E501 - quobyte (QuobyteVolumeSource): [optional] # noqa: E501 - rbd (RBDVolumeSource): [optional] # noqa: E501 - scale_io (ScaleIOVolumeSource): [optional] # noqa: E501 - secret (SecretVolumeSource): [optional] # noqa: E501 - storageos (StorageOSVolumeSource): [optional] # noqa: E501 - vsphere_volume (VsphereVirtualDiskVolumeSource): [optional] # noqa: E501 - """ - - _check_type = 
kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_code.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_code.py deleted file mode 100644 index fb26810ed04f..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_code.py +++ /dev/null @@ -1,263 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Code(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'image': (str,), # noqa: E501 - 'runtime': (str,), # noqa: E501 - 'source': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'image': 'image', # noqa: E501 - 'runtime': 'runtime', # noqa: E501 - 'source': 'source', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Code - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - image (str): Image is used in preference to Runtime.. [optional] # noqa: E501 - runtime (str): [optional] # noqa: E501 - source (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Code - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - image (str): Image is used in preference to Runtime.. 
[optional] # noqa: E501 - runtime (str): [optional] # noqa: E501 - source (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_container.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_container.py deleted file mode 100644 index 9e68a2095744..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_container.py +++ /dev/null @@ -1,291 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.env_var import EnvVar - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_interface import GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface - from argo_workflows.model.resource_requirements import ResourceRequirements - from argo_workflows.model.volume_mount import VolumeMount - globals()['EnvVar'] = EnvVar - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface - globals()['ResourceRequirements'] = ResourceRequirements - globals()['VolumeMount'] = VolumeMount - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Container(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'args': ([str],), # noqa: E501 - 'command': ([str],), # noqa: E501 - 'env': ([EnvVar],), # noqa: E501 - 'image': (str,), # noqa: E501 - '_in': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface,), # noqa: E501 - 'resources': (ResourceRequirements,), # noqa: E501 - 'volume_mounts': ([VolumeMount],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'args': 'args', # noqa: E501 - 'command': 'command', # noqa: E501 - 'env': 'env', # noqa: E501 - 'image': 'image', # noqa: E501 - '_in': 'in', # noqa: E501 - 'resources': 'resources', # noqa: E501 - 'volume_mounts': 'volumeMounts', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Container - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - args ([str]): [optional] # noqa: E501 - command ([str]): [optional] # noqa: E501 - env ([EnvVar]): [optional] # noqa: E501 - image (str): [optional] # noqa: E501 - _in (GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface): [optional] # noqa: E501 - resources (ResourceRequirements): [optional] # noqa: E501 - volume_mounts ([VolumeMount]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Container - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - args ([str]): [optional] # noqa: E501 - command ([str]): [optional] # noqa: E501 - env ([EnvVar]): [optional] # noqa: E501 - image (str): [optional] # noqa: E501 - _in (GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface): [optional] # noqa: E501 - resources (ResourceRequirements): [optional] # noqa: E501 - volume_mounts ([VolumeMount]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_cron.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_cron.py deleted file mode 100644 index b52910f87eec..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_cron.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'layout': (str,), # noqa: E501 - 'schedule': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'layout': 'layout', # noqa: E501 - 'schedule': 'schedule', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - layout (str): [optional] # noqa: E501 - schedule (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - layout (str): [optional] # noqa: E501 - schedule (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_data_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_data_source.py deleted file mode 100644 index 757d4c35ee77..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_data_source.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_data_source_from import GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'value': (str,), # noqa: E501 - 'value_from': (GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'value': 'value', # noqa: E501 - 'value_from': 'valueFrom', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - value (str): [optional] # noqa: E501 - value_from (GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - value (str): [optional] # noqa: E501 - value_from (GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_sink.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_sink.py deleted file mode 100644 index 0dd2c5555305..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_sink.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_database import GithubComArgoprojLabsArgoDataflowApiV1alpha1Database - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sql_action import GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Database'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Database - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. 
- Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'actions': ([GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction],), # noqa: E501 - 'database': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Database,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'actions': 'actions', # noqa: E501 - 'database': 'database', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - actions ([GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction]): [optional] # noqa: E501 - database (GithubComArgoprojLabsArgoDataflowApiV1alpha1Database): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - actions ([GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction]): [optional] # noqa: E501 - database (GithubComArgoprojLabsArgoDataflowApiV1alpha1Database): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_source.py deleted file mode 100644 index c36e05e05a89..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_source.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.duration import Duration - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_database import GithubComArgoprojLabsArgoDataflowApiV1alpha1Database - globals()['Duration'] = Duration - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Database'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Database - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'commit_interval': (Duration,), # noqa: E501 - 'database': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Database,), # noqa: E501 - 'init_schema': (bool,), # noqa: E501 - 'offset_column': (str,), # noqa: E501 - 'poll_interval': (Duration,), # noqa: E501 - 'query': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'commit_interval': 'commitInterval', # noqa: E501 - 'database': 'database', # noqa: E501 - 'init_schema': 'initSchema', # noqa: E501 - 'offset_column': 'offsetColumn', # noqa: E501 - 'poll_interval': 'pollInterval', # noqa: E501 - 'query': 'query', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - commit_interval (Duration): [optional] # noqa: E501 - database (GithubComArgoprojLabsArgoDataflowApiV1alpha1Database): [optional] # noqa: E501 - init_schema (bool): [optional] # noqa: E501 - offset_column (str): [optional] # noqa: E501 - poll_interval (Duration): [optional] # noqa: E501 - query (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - commit_interval (Duration): [optional] # noqa: E501 - database (GithubComArgoprojLabsArgoDataflowApiV1alpha1Database): [optional] # noqa: E501 - init_schema (bool): [optional] # noqa: E501 - offset_column (str): [optional] # noqa: E501 - poll_interval (Duration): [optional] # noqa: E501 - query (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_dedupe.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_dedupe.py deleted file mode 100644 index 045b432c93ac..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_dedupe.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_abstract_step import GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'abstract_step': (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep,), # noqa: E501 - 'max_size': (str,), # noqa: E501 - 'uid': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'abstract_step': 'abstractStep', # noqa: E501 - 'max_size': 'maxSize', # noqa: E501 - 'uid': 'uid', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - abstract_step (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep): [optional] # noqa: E501 - max_size (str): Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: ::= (Note that may be empty, from the \"\" case in .) ::= 0 | 1 | ... | 9 ::= | ::= | . | . | . ::= \"+\" | \"-\" ::= | ::= | | ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) ::= \"e\" | \"E\" No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. 
Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation.. [optional] # noqa: E501 - uid (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - abstract_step (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep): [optional] # noqa: E501 - max_size (str): Quantity is a fixed-point representation of a number. 
It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: ::= (Note that may be empty, from the \"\" case in .) ::= 0 | 1 | ... | 9 ::= | ::= | . | . | . ::= \"+\" | \"-\" ::= | ::= | | ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) ::= \"e\" | \"E\" No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation.. 
[optional] # noqa: E501 - uid (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_filter.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_filter.py deleted file mode 100644 index dd7625e3a28e..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_filter.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_abstract_step import GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'abstract_step': (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep,), # noqa: E501 - 'expression': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'abstract_step': 'abstractStep', # noqa: E501 - 'expression': 'expression', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - abstract_step (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep): [optional] # noqa: E501 - expression (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - abstract_step (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep): [optional] # noqa: E501 - expression (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_flatten.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_flatten.py deleted file mode 100644 index 197e0d86b36a..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_flatten.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_abstract_step import GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'abstract_step': (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'abstract_step': 'abstractStep', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - abstract_step (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - abstract_step (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_git.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_git.py deleted file mode 100644 index 7bc87b3ad658..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_git.py +++ /dev/null @@ -1,299 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.env_var import EnvVar - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['EnvVar'] = EnvVar - globals()['SecretKeySelector'] = SecretKeySelector - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Git(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'branch': (str,), # noqa: E501 - 'command': ([str],), # noqa: E501 - 'env': ([EnvVar],), # noqa: E501 - 'image': (str,), # noqa: E501 - 'insecure_ignore_host_key': (bool,), # noqa: E501 - 'password_secret': (SecretKeySelector,), # noqa: E501 - 'path': (str,), # noqa: E501 - 'ssh_private_key_secret': (SecretKeySelector,), # noqa: E501 - 'url': (str,), # noqa: E501 - 'username_secret': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'branch': 'branch', # noqa: E501 - 'command': 'command', # noqa: E501 - 'env': 'env', # noqa: E501 - 'image': 'image', # noqa: E501 - 'insecure_ignore_host_key': 'insecureIgnoreHostKey', # noqa: E501 - 'password_secret': 'passwordSecret', # noqa: E501 - 'path': 'path', # noqa: E501 - 'ssh_private_key_secret': 'sshPrivateKeySecret', # noqa: E501 - 'url': 'url', # noqa: E501 - 'username_secret': 'usernameSecret', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Git - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError 
will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - branch (str): [optional] # noqa: E501 - command ([str]): [optional] # noqa: E501 - env ([EnvVar]): [optional] # noqa: E501 - image (str): [optional] # noqa: E501 - insecure_ignore_host_key (bool): [optional] # noqa: E501 - password_secret (SecretKeySelector): [optional] # noqa: E501 - path (str): +kubebuilder:default=.. 
[optional] # noqa: E501 - ssh_private_key_secret (SecretKeySelector): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - username_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Git - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - branch (str): [optional] # noqa: E501 - command ([str]): [optional] # noqa: E501 - env ([EnvVar]): [optional] # noqa: E501 - image (str): [optional] # noqa: E501 - insecure_ignore_host_key (bool): [optional] # noqa: E501 - password_secret (SecretKeySelector): [optional] # noqa: E501 - path (str): +kubebuilder:default=.. 
[optional] # noqa: E501 - ssh_private_key_secret (SecretKeySelector): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - username_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_group.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_group.py deleted file mode 100644 index 4691323aa3e2..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_group.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_storage import GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Group(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. 
These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'end_of_group': (str,), # noqa: E501 - 'format': (str,), # noqa: E501 - 'key': (str,), # noqa: E501 - 'storage': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'end_of_group': 'endOfGroup', # noqa: E501 - 'format': 'format', # noqa: E501 - 'key': 'key', # noqa: E501 - 'storage': 'storage', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Group - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - end_of_group (str): [optional] # noqa: E501 - format (str): [optional] # noqa: E501 - key (str): [optional] # noqa: E501 - storage (GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Group - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - end_of_group (str): [optional] # noqa: E501 - format (str): [optional] # noqa: E501 - key (str): [optional] # noqa: E501 - storage (GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_header.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_header.py deleted file mode 100644 index 5fb24c999b67..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_header.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_header_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'name': (str,), # noqa: E501 - 'value': (str,), # noqa: E501 - 'value_from': (GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'value': 'value', # noqa: E501 - 'value_from': 'valueFrom', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - value (str): [optional] # noqa: E501 - value_from (GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - value (str): [optional] # noqa: E501 - value_from (GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_sink.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_sink.py deleted file mode 100644 index 7131d42e343f..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_sink.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_header import GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'headers': ([GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader],), # noqa: E501 - 'insecure_skip_verify': (bool,), # noqa: E501 - 'url': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'headers': 'headers', # noqa: E501 - 'insecure_skip_verify': 'insecureSkipVerify', # noqa: E501 - 'url': 'url', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - headers ([GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader]): [optional] # noqa: E501 - insecure_skip_verify (bool): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - headers ([GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader]): [optional] # noqa: E501 - insecure_skip_verify (bool): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_source.py deleted file mode 100644 index e9cdb99d3589..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_source.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'service_name': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'service_name': 'serviceName', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - service_name (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - service_name (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_interface.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_interface.py deleted file mode 100644 index 563f8a51f35b..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_interface.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'fifo': (bool,), # noqa: E501 - 'http': (bool, date, datetime, dict, float, int, list, str, none_type,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'fifo': 'fifo', # noqa: E501 - 'http': 'http', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fifo (bool): [optional] # noqa: E501 - http (bool, date, datetime, dict, float, int, list, str, none_type): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fifo (bool): [optional] # noqa: E501 - http (bool, date, datetime, dict, float, int, list, str, none_type): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_jet_stream.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_jet_stream.py deleted file mode 100644 index 018b87e043e8..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_jet_stream.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_nats_auth import GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'auth': (GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'nats_url': (str,), # noqa: E501 - 'subject': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'auth': 'auth', # noqa: E501 - 'name': 'name', # noqa: E501 - 'nats_url': 'natsUrl', # noqa: E501 - 'subject': 'subject', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - nats_url (str): [optional] # noqa: E501 - subject (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - nats_url (str): [optional] # noqa: E501 - subject (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_jet_stream_sink.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_jet_stream_sink.py deleted file mode 100644 index 29e72ff425e8..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_jet_stream_sink.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_jet_stream import GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'jetstream': (GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'jetstream': 'jetstream', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - jetstream (GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - jetstream (GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_jet_stream_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_jet_stream_source.py deleted file mode 100644 index f2a321db265b..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_jet_stream_source.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_jet_stream import GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'jetstream': (GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'jetstream': 'jetstream', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - jetstream (GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - jetstream (GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka.py deleted file mode 100644 index 4b1bf98b953e..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_config import GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'kafka_config': (GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'topic': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'kafka_config': 'kafkaConfig', # noqa: E501 - 'name': 'name', # noqa: E501 - 'topic': 'topic', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - kafka_config (GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - topic (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - kafka_config (GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - topic (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_config.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_config.py deleted file mode 100644 index 6dfb6e891ee2..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_config.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_net import GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'brokers': ([str],), # noqa: E501 - 'max_message_bytes': (int,), # noqa: E501 - 'net': (GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'brokers': 'brokers', # noqa: E501 - 'max_message_bytes': 'maxMessageBytes', # noqa: E501 - 'net': 'net', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - brokers ([str]): [optional] # noqa: E501 - max_message_bytes (int): [optional] # noqa: E501 - net (GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - brokers ([str]): [optional] # noqa: E501 - max_message_bytes (int): [optional] # noqa: E501 - net (GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_net.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_net.py deleted file mode 100644 index baee05cb1abf..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_net.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sasl import GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_tls import GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. 
- Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'sasl': (GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL,), # noqa: E501 - 'tls': (GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'sasl': 'sasl', # noqa: E501 - 'tls': 'tls', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - sasl (GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL): [optional] # noqa: E501 - tls (GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - sasl (GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL): [optional] # noqa: E501 - tls (GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_sink.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_sink.py deleted file mode 100644 index ac7bc7b66d91..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_sink.py +++ /dev/null @@ -1,295 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.duration import Duration - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka import GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka - globals()['Duration'] = Duration - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'acks': (str,), # noqa: E501 - '_async': (bool,), # noqa: E501 - 'batch_size': (str,), # noqa: E501 - 'compression_type': (str,), # noqa: E501 - 'enable_idempotence': (bool,), # noqa: E501 - 'kafka': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka,), # noqa: E501 - 'linger': (Duration,), # noqa: E501 - 'max_inflight': (int,), # noqa: E501 - 'message_timeout': (Duration,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'acks': 'acks', # noqa: E501 - '_async': 'async', # noqa: E501 - 'batch_size': 'batchSize', # noqa: E501 - 'compression_type': 'compressionType', # noqa: E501 - 'enable_idempotence': 'enableIdempotence', # noqa: E501 - 'kafka': 'kafka', # noqa: E501 - 'linger': 'linger', # noqa: E501 - 'max_inflight': 'maxInflight', # noqa: E501 - 'message_timeout': 'messageTimeout', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - acks (str): [optional] # noqa: E501 - _async (bool): [optional] # noqa: E501 - batch_size (str): Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: ::= (Note that may be empty, from the \"\" case in .) ::= 0 | 1 | ... | 9 ::= | ::= | . | . | . ::= \"+\" | \"-\" ::= | ::= | | ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) ::= \"e\" | \"E\" No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". 
This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation.. [optional] # noqa: E501 - compression_type (str): [optional] # noqa: E501 - enable_idempotence (bool): [optional] # noqa: E501 - kafka (GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka): [optional] # noqa: E501 - linger (Duration): [optional] # noqa: E501 - max_inflight (int): [optional] # noqa: E501 - message_timeout (Duration): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - acks (str): [optional] # noqa: E501 - _async (bool): [optional] # noqa: E501 - batch_size (str): Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: ::= (Note that may be empty, from the \"\" case in .) ::= 0 | 1 | ... | 9 ::= | ::= | . | . | . ::= \"+\" | \"-\" ::= | ::= | | ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) ::= \"e\" | \"E\" No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. 
The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation.. [optional] # noqa: E501 - compression_type (str): [optional] # noqa: E501 - enable_idempotence (bool): [optional] # noqa: E501 - kafka (GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka): [optional] # noqa: E501 - linger (Duration): [optional] # noqa: E501 - max_inflight (int): [optional] # noqa: E501 - message_timeout (Duration): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_source.py deleted file mode 100644 index 565aa62a35b5..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_source.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.duration import Duration - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka import GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka - globals()['Duration'] = Duration - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'fetch_min': (str,), # noqa: E501 - 'fetch_wait_max': (Duration,), # noqa: E501 - 'group_id': (str,), # noqa: E501 - 'kafka': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka,), # noqa: E501 - 'start_offset': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'fetch_min': 'fetchMin', # noqa: E501 - 'fetch_wait_max': 'fetchWaitMax', # noqa: E501 - 'group_id': 'groupId', # noqa: E501 - 'kafka': 'kafka', # noqa: E501 - 'start_offset': 'startOffset', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fetch_min (str): Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: ::= (Note that may be empty, from the \"\" case in .) ::= 0 | 1 | ... | 9 ::= | ::= | . | . | . ::= \"+\" | \"-\" ::= | ::= | | ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) ::= \"e\" | \"E\" No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. 
Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation.. [optional] # noqa: E501 - fetch_wait_max (Duration): [optional] # noqa: E501 - group_id (str): GroupID is the consumer group ID. If not specified, a unique deterministic group ID is generated.. 
[optional] # noqa: E501 - kafka (GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka): [optional] # noqa: E501 - start_offset (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fetch_min (str): Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: ::= (Note that may be empty, from the \"\" case in .) ::= 0 | 1 | ... | 9 ::= | ::= | . | . | . ::= \"+\" | \"-\" ::= | ::= | | ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) ::= \"e\" | \"E\" No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. 
Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation.. [optional] # noqa: E501 - fetch_wait_max (Duration): [optional] # noqa: E501 - group_id (str): GroupID is the consumer group ID. If not specified, a unique deterministic group ID is generated.. 
[optional] # noqa: E501 - kafka (GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka): [optional] # noqa: E501 - start_offset (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_log.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_log.py deleted file mode 100644 index e2807fead6b8..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_log.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Log(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'truncate': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'truncate': 'truncate', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Log - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - truncate (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Log - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - truncate (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_map.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_map.py deleted file mode 100644 index 9f4702d124a6..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_map.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_abstract_step import GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Map(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. 
These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'abstract_step': (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep,), # noqa: E501 - 'expression': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'abstract_step': 'abstractStep', # noqa: E501 - 'expression': 'expression', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Map - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - abstract_step (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep): [optional] # noqa: E501 - expression (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Map - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - abstract_step (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep): [optional] # noqa: E501 - expression (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_metadata.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_metadata.py deleted file mode 100644 index a613a2569065..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_metadata.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'annotations': ({str: (str,)},), # noqa: E501 - 'labels': ({str: (str,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'annotations': 'annotations', # noqa: E501 - 'labels': 'labels', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - annotations ({str: (str,)}): [optional] # noqa: E501 - labels ({str: (str,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - annotations ({str: (str,)}): [optional] # noqa: E501 - labels ({str: (str,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline.py deleted file mode 100644 index 78f56ae047d8..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline_spec import GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline_status import GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus - from argo_workflows.model.object_meta import ObjectMeta - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus - globals()['ObjectMeta'] = ObjectMeta - - -class 
GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'metadata': (ObjectMeta,), # noqa: E501 - 'spec': (GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec,), # noqa: E501 - 'status': (GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'metadata': 'metadata', # noqa: E501 - 'spec': 'spec', # noqa: E501 - 'status': 'status', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - metadata (ObjectMeta): [optional] # noqa: E501 - spec (GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec): [optional] # noqa: E501 - status (GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - metadata (ObjectMeta): [optional] # noqa: E501 - spec (GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec): [optional] # noqa: E501 - status (GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline_list.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline_list.py deleted file mode 100644 index cf9a30bfa513..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline_list.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline import GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline - from argo_workflows.model.list_meta import ListMeta - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline - globals()['ListMeta'] = ListMeta - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'items': ([GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline],), # noqa: E501 - 'metadata': (ListMeta,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'items': 'items', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - items ([GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline]): [optional] # noqa: E501 - metadata (ListMeta): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - items ([GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline]): [optional] # noqa: E501 - metadata (ListMeta): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline_spec.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline_spec.py deleted file mode 100644 index 7a81f6eb3e09..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline_spec.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.duration import Duration - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_step_spec import GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec - globals()['Duration'] = Duration - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'deletion_delay': (Duration,), # noqa: E501 - 'steps': ([GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'deletion_delay': 'deletionDelay', # noqa: E501 - 'steps': 'steps', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - deletion_delay (Duration): [optional] # noqa: E501 - steps ([GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - deletion_delay (Duration): [optional] # noqa: E501 - steps ([GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline_status.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline_status.py deleted file mode 100644 index 2c7fd3136a40..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline_status.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.condition import Condition - globals()['Condition'] = Condition - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'conditions': ([Condition],), # noqa: E501 - 'last_updated': (datetime,), # noqa: E501 - 'message': (str,), # noqa: E501 - 'phase': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'conditions': 'conditions', # noqa: E501 - 'last_updated': 'lastUpdated', # noqa: E501 - 'message': 'message', # noqa: E501 - 'phase': 'phase', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - conditions ([Condition]): [optional] # noqa: E501 - last_updated (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - message (str): [optional] # noqa: E501 - phase (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - conditions ([Condition]): [optional] # noqa: E501 - last_updated (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. 
Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - message (str): [optional] # noqa: E501 - phase (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_s3.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_s3.py deleted file mode 100644 index f4690ad6c20c..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_s3.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_aws_credentials import GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_aws_endpoint import GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1S3(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. 
- Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'bucket': (str,), # noqa: E501 - 'credentials': (GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials,), # noqa: E501 - 'endpoint': (GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'region': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'bucket': 'bucket', # noqa: E501 - 'credentials': 'credentials', # noqa: E501 - 'endpoint': 'endpoint', # noqa: E501 - 'name': 'name', # noqa: E501 - 'region': 'region', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1S3 - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - bucket (str): [optional] # noqa: E501 - credentials (GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials): [optional] # noqa: E501 - endpoint (GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - region (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1S3 - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - bucket (str): [optional] # noqa: E501 - credentials (GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials): [optional] # noqa: E501 - endpoint (GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - region (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_s3_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_s3_source.py deleted file mode 100644 index d7f357bedf49..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_s3_source.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.duration import Duration - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_s3 import GithubComArgoprojLabsArgoDataflowApiV1alpha1S3 - globals()['Duration'] = Duration - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1S3'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1S3 - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'concurrency': (int,), # noqa: E501 - 'poll_period': (Duration,), # noqa: E501 - 's3': (GithubComArgoprojLabsArgoDataflowApiV1alpha1S3,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'concurrency': 'concurrency', # noqa: E501 - 'poll_period': 'pollPeriod', # noqa: E501 - 's3': 's3', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - concurrency (int): [optional] # noqa: E501 - poll_period (Duration): [optional] # noqa: E501 - s3 (GithubComArgoprojLabsArgoDataflowApiV1alpha1S3): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - concurrency (int): [optional] # noqa: E501 - poll_period (Duration): [optional] # noqa: E501 - s3 (GithubComArgoprojLabsArgoDataflowApiV1alpha1S3): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sasl.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sasl.py deleted file mode 100644 index 9d81d957c257..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sasl.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'mechanism': (str,), # noqa: E501 - 'password': (SecretKeySelector,), # noqa: E501 - 'user': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'mechanism': 'mechanism', # noqa: E501 - 'password': 'password', # noqa: E501 - 'user': 'user', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - mechanism (str): [optional] # noqa: E501 - password (SecretKeySelector): [optional] # noqa: E501 - user (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - mechanism (str): [optional] # noqa: E501 - password (SecretKeySelector): [optional] # noqa: E501 - user (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_scale.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_scale.py deleted file mode 100644 index f3eebc0aa3bd..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_scale.py +++ /dev/null @@ -1,263 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'desired_replicas': (str,), # noqa: E501 - 'peek_delay': (str,), # noqa: E501 - 'scaling_delay': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'desired_replicas': 'desiredReplicas', # noqa: E501 - 'peek_delay': 'peekDelay', # noqa: E501 - 'scaling_delay': 'scalingDelay', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - desired_replicas (str): An expression to determine the number of replicas. Must evaluation to an `int`.. [optional] # noqa: E501 - peek_delay (str): [optional] # noqa: E501 - scaling_delay (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - desired_replicas (str): An expression to determine the number of replicas. Must evaluation to an `int`.. [optional] # noqa: E501 - peek_delay (str): [optional] # noqa: E501 - scaling_delay (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sidecar.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sidecar.py deleted file mode 100644 index 3cc8baf0353f..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sidecar.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.resource_requirements import ResourceRequirements - globals()['ResourceRequirements'] = ResourceRequirements - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'resources': (ResourceRequirements,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'resources': 'resources', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - resources (ResourceRequirements): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - resources (ResourceRequirements): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sink.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sink.py deleted file mode 100644 index a15cde5104f2..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sink.py +++ /dev/null @@ -1,311 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_sink import GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_sink import GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_jet_stream_sink import GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_sink import GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_log import 
GithubComArgoprojLabsArgoDataflowApiV1alpha1Log - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_s3_sink import GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_stan import GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_volume_sink import GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Log'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Log - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'db': (GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink,), # noqa: E501 - 'dead_letter_queue': (bool,), # noqa: E501 - 'http': (GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink,), # noqa: E501 - 'jetstream': (GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink,), # noqa: E501 - 'kafka': (GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink,), # noqa: E501 - 'log': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Log,), # noqa: E501 - 'name': (str,), # noqa: E501 - 's3': (GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink,), # noqa: E501 - 'stan': (GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN,), # noqa: E501 - 'volume': (GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'db': 'db', # noqa: E501 - 'dead_letter_queue': 'deadLetterQueue', # noqa: E501 - 'http': 'http', # noqa: E501 - 'jetstream': 'jetstream', # noqa: E501 - 'kafka': 'kafka', # noqa: E501 - 'log': 'log', # noqa: E501 - 'name': 'name', # noqa: E501 - 's3': 's3', # noqa: E501 - 'stan': 'stan', # noqa: E501 - 'volume': 'volume', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - db (GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink): [optional] # noqa: E501 - dead_letter_queue (bool): [optional] # noqa: E501 - http (GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink): [optional] # noqa: E501 - jetstream (GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink): [optional] # noqa: E501 - kafka (GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink): [optional] # noqa: E501 - log (GithubComArgoprojLabsArgoDataflowApiV1alpha1Log): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - s3 (GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink): [optional] # noqa: E501 - stan (GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN): [optional] # noqa: E501 - volume (GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, 
cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - db (GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink): [optional] # noqa: E501 - dead_letter_queue (bool): [optional] # noqa: E501 - http (GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink): [optional] # noqa: E501 - jetstream (GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink): [optional] # noqa: E501 - kafka (GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink): [optional] # noqa: E501 - log (GithubComArgoprojLabsArgoDataflowApiV1alpha1Log): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - s3 (GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink): [optional] # noqa: E501 - stan (GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN): [optional] # noqa: E501 - volume (GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_source.py deleted file mode 100644 index 3db3705953ff..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_source.py +++ /dev/null @@ -1,313 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_backoff import GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_cron import GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_jet_stream_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_s3_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_stan import GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_volume_source import 
GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Source(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'cron': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron,), # noqa: E501 - 'db': (GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource,), # noqa: E501 - 'http': (GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource,), # noqa: E501 - 'jetstream': (GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource,), # noqa: E501 - 'kafka': (GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'retry': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff,), # noqa: E501 - 's3': (GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source,), # noqa: E501 - 'stan': (GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN,), # noqa: E501 - 'volume': (GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'cron': 'cron', # noqa: E501 - 'db': 'db', # noqa: E501 - 'http': 'http', # noqa: E501 - 'jetstream': 'jetstream', # noqa: E501 - 'kafka': 'kafka', # noqa: E501 - 'name': 'name', # 
noqa: E501 - 'retry': 'retry', # noqa: E501 - 's3': 's3', # noqa: E501 - 'stan': 'stan', # noqa: E501 - 'volume': 'volume', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Source - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - cron (GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron): [optional] # noqa: E501 - db (GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource): [optional] # noqa: E501 - http (GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource): [optional] # noqa: E501 - jetstream (GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource): [optional] # noqa: E501 - kafka (GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - retry (GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff): [optional] # noqa: E501 - s3 (GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source): [optional] # noqa: E501 - stan (GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN): [optional] # noqa: E501 - volume (GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Source - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - cron (GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron): [optional] # noqa: E501 - db (GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource): [optional] # noqa: E501 - http (GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource): [optional] # noqa: E501 - jetstream (GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource): [optional] # noqa: E501 - kafka (GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - retry (GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff): [optional] # noqa: E501 - s3 (GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source): [optional] # noqa: E501 - stan (GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN): [optional] # noqa: E501 - volume (GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sql_action.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sql_action.py deleted file mode 100644 index b21ed5cdf6e4..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sql_action.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sql_statement import GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'on_error': (GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement,), # noqa: E501 - 'on_record_not_found': (GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement,), # noqa: E501 - 'statement': (GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'on_error': 'onError', # noqa: E501 - 'on_record_not_found': 'onRecordNotFound', # noqa: E501 - 'statement': 'statement', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - on_error (GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement): [optional] # noqa: E501 - on_record_not_found (GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement): [optional] # noqa: E501 - statement (GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - on_error (GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement): [optional] # noqa: E501 - on_record_not_found (GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement): [optional] # noqa: E501 - statement (GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sql_statement.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sql_statement.py deleted file mode 100644 index 6030be7bf85b..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sql_statement.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'args': ([str],), # noqa: E501 - 'sql': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'args': 'args', # noqa: E501 - 'sql': 'sql', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - args ([str]): [optional] # noqa: E501 - sql (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - args ([str]): [optional] # noqa: E501 - sql (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_stan.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_stan.py deleted file mode 100644 index d29a0fe8322b..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_stan.py +++ /dev/null @@ -1,289 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_nats_auth import GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. 
These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'auth': (GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth,), # noqa: E501 - 'cluster_id': (str,), # noqa: E501 - 'max_inflight': (int,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'nats_monitoring_url': (str,), # noqa: E501 - 'nats_url': (str,), # noqa: E501 - 'subject': (str,), # noqa: E501 - 'subject_prefix': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'auth': 'auth', # noqa: E501 - 'cluster_id': 'clusterId', # noqa: E501 - 'max_inflight': 'maxInflight', # noqa: E501 - 'name': 'name', # noqa: E501 - 'nats_monitoring_url': 'natsMonitoringUrl', # noqa: E501 - 'nats_url': 'natsUrl', # noqa: E501 - 'subject': 'subject', # noqa: E501 - 'subject_prefix': 'subjectPrefix', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth): [optional] # noqa: E501 - cluster_id (str): [optional] # noqa: E501 - max_inflight (int): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - nats_monitoring_url (str): [optional] # noqa: E501 - nats_url (str): [optional] # noqa: E501 - subject (str): [optional] # noqa: E501 - subject_prefix (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth): [optional] # noqa: E501 - cluster_id (str): [optional] # noqa: E501 - max_inflight (int): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - nats_monitoring_url (str): [optional] # noqa: E501 - nats_url (str): [optional] # noqa: E501 - subject (str): [optional] # noqa: E501 - subject_prefix (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_step.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_step.py deleted file mode 100644 index 151a65605861..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_step.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_step_spec import GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_step_status import GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus - from argo_workflows.model.object_meta import ObjectMeta - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus - globals()['ObjectMeta'] = ObjectMeta - - -class 
GithubComArgoprojLabsArgoDataflowApiV1alpha1Step(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'metadata': (ObjectMeta,), # noqa: E501 - 'spec': (GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec,), # noqa: E501 - 'status': (GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'metadata': 'metadata', # noqa: E501 - 'spec': 'spec', # noqa: E501 - 'status': 'status', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Step - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - metadata (ObjectMeta): [optional] # noqa: E501 - spec (GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec): [optional] # noqa: E501 - status (GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Step - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - metadata (ObjectMeta): [optional] # noqa: E501 - spec (GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec): [optional] # noqa: E501 - status (GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_step_spec.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_step_spec.py deleted file mode 100644 index 496c6eb54fa6..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_step_spec.py +++ /dev/null @@ -1,393 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.affinity import Affinity - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_cat import GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_code import GithubComArgoprojLabsArgoDataflowApiV1alpha1Code - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_container import GithubComArgoprojLabsArgoDataflowApiV1alpha1Container - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_dedupe import GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe - from 
argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_expand import GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_filter import GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_flatten import GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_git import GithubComArgoprojLabsArgoDataflowApiV1alpha1Git - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_group import GithubComArgoprojLabsArgoDataflowApiV1alpha1Group - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_map import GithubComArgoprojLabsArgoDataflowApiV1alpha1Map - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_metadata import GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_scale import GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sidecar import GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sink import GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1Source - from argo_workflows.model.local_object_reference import LocalObjectReference - from argo_workflows.model.toleration import Toleration - from argo_workflows.model.volume import Volume - globals()['Affinity'] = Affinity - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Code'] = 
GithubComArgoprojLabsArgoDataflowApiV1alpha1Code - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Container'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Container - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Git'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Git - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Group'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Group - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Map'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Map - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Source'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Source - globals()['LocalObjectReference'] = LocalObjectReference - globals()['Toleration'] = Toleration - globals()['Volume'] = Volume - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'affinity': (Affinity,), # noqa: E501 - 'cat': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat,), # noqa: E501 - 'code': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Code,), # noqa: E501 - 'container': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Container,), # noqa: E501 - 'dedupe': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe,), # noqa: E501 - 'expand': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand,), # noqa: E501 - 'filter': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter,), # noqa: E501 - 'flatten': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten,), # noqa: E501 - 'git': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Git,), # noqa: E501 - 'group': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Group,), # noqa: E501 - 'image_pull_secrets': ([LocalObjectReference],), # noqa: E501 - 'map': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Map,), # noqa: E501 - 'metadata': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'node_selector': ({str: (str,)},), # noqa: E501 - 'replicas': (int,), # noqa: E501 - 'restart_policy': (str,), # noqa: E501 - 'scale': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale,), # noqa: E501 - 'service_account_name': (str,), # noqa: E501 - 'sidecar': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar,), # noqa: E501 - 'sinks': ([GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink],), # noqa: E501 - 'sources': ([GithubComArgoprojLabsArgoDataflowApiV1alpha1Source],), # noqa: E501 - 'terminator': (bool,), # noqa: E501 - 'tolerations': ([Toleration],), # noqa: E501 - 'volumes': ([Volume],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'affinity': 'affinity', # noqa: E501 - 'cat': 'cat', # noqa: E501 - 'code': 'code', # noqa: E501 - 'container': 'container', # noqa: E501 - 'dedupe': 'dedupe', # noqa: E501 - 'expand': 'expand', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'flatten': 
'flatten', # noqa: E501 - 'git': 'git', # noqa: E501 - 'group': 'group', # noqa: E501 - 'image_pull_secrets': 'imagePullSecrets', # noqa: E501 - 'map': 'map', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'name': 'name', # noqa: E501 - 'node_selector': 'nodeSelector', # noqa: E501 - 'replicas': 'replicas', # noqa: E501 - 'restart_policy': 'restartPolicy', # noqa: E501 - 'scale': 'scale', # noqa: E501 - 'service_account_name': 'serviceAccountName', # noqa: E501 - 'sidecar': 'sidecar', # noqa: E501 - 'sinks': 'sinks', # noqa: E501 - 'sources': 'sources', # noqa: E501 - 'terminator': 'terminator', # noqa: E501 - 'tolerations': 'tolerations', # noqa: E501 - 'volumes': 'volumes', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - affinity (Affinity): [optional] # noqa: E501 - cat (GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat): [optional] # noqa: E501 - code (GithubComArgoprojLabsArgoDataflowApiV1alpha1Code): [optional] # noqa: E501 - container (GithubComArgoprojLabsArgoDataflowApiV1alpha1Container): [optional] # noqa: E501 - dedupe (GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe): [optional] # noqa: E501 - expand (GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand): [optional] # noqa: E501 - filter (GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter): [optional] # noqa: E501 - flatten (GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten): [optional] # noqa: E501 - git (GithubComArgoprojLabsArgoDataflowApiV1alpha1Git): [optional] # noqa: E501 - group (GithubComArgoprojLabsArgoDataflowApiV1alpha1Group): [optional] # noqa: E501 - image_pull_secrets ([LocalObjectReference]): [optional] # noqa: E501 - map (GithubComArgoprojLabsArgoDataflowApiV1alpha1Map): [optional] # noqa: E501 - metadata (GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - node_selector ({str: (str,)}): [optional] # noqa: E501 - replicas (int): [optional] # noqa: E501 - restart_policy (str): [optional] # noqa: E501 - scale (GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale): [optional] # noqa: E501 - service_account_name (str): [optional] # noqa: E501 - sidecar (GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar): [optional] # noqa: E501 - sinks 
([GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink]): [optional] # noqa: E501 - sources ([GithubComArgoprojLabsArgoDataflowApiV1alpha1Source]): [optional] # noqa: E501 - terminator (bool): [optional] # noqa: E501 - tolerations ([Toleration]): [optional] # noqa: E501 - volumes ([Volume]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - affinity (Affinity): [optional] # noqa: E501 - cat (GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat): [optional] # noqa: E501 - code (GithubComArgoprojLabsArgoDataflowApiV1alpha1Code): [optional] # noqa: E501 - container (GithubComArgoprojLabsArgoDataflowApiV1alpha1Container): [optional] # noqa: E501 - dedupe (GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe): [optional] # noqa: E501 - expand (GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand): [optional] # noqa: E501 - filter (GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter): [optional] # noqa: E501 - flatten (GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten): [optional] # noqa: E501 - git (GithubComArgoprojLabsArgoDataflowApiV1alpha1Git): [optional] # noqa: E501 - group (GithubComArgoprojLabsArgoDataflowApiV1alpha1Group): [optional] # noqa: E501 - image_pull_secrets ([LocalObjectReference]): [optional] # noqa: E501 - map (GithubComArgoprojLabsArgoDataflowApiV1alpha1Map): [optional] # noqa: E501 - metadata (GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - node_selector ({str: (str,)}): [optional] # noqa: E501 - replicas (int): [optional] # noqa: E501 - restart_policy (str): [optional] # noqa: E501 - scale (GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale): [optional] # noqa: E501 - service_account_name (str): [optional] # noqa: E501 - sidecar (GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar): [optional] # noqa: E501 - sinks ([GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink]): [optional] # noqa: E501 - sources ([GithubComArgoprojLabsArgoDataflowApiV1alpha1Source]): [optional] # noqa: E501 - terminator (bool): [optional] # noqa: E501 - tolerations ([Toleration]): [optional] # noqa: E501 - volumes ([Volume]): [optional] # noqa: E501 - """ - - _check_type 
= kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_step_status.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_step_status.py deleted file mode 100644 index 46fdb5bb7ca0..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_step_status.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'last_scaled_at': (datetime,), # noqa: E501 - 'message': (str,), # noqa: E501 - 'phase': (str,), # noqa: E501 - 'reason': (str,), # noqa: E501 - 'replicas': (int,), # noqa: E501 - 'selector': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'last_scaled_at': 'lastScaledAt', # noqa: E501 - 'message': 'message', # noqa: E501 - 'phase': 'phase', # noqa: E501 - 'reason': 'reason', # noqa: E501 - 'replicas': 'replicas', # noqa: E501 - 'selector': 'selector', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - last_scaled_at (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - message (str): [optional] # noqa: E501 - phase (str): [optional] # noqa: E501 - reason (str): [optional] # noqa: E501 - replicas (int): [optional] # noqa: E501 - selector (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - last_scaled_at (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - message (str): [optional] # noqa: E501 - phase (str): [optional] # noqa: E501 - reason (str): [optional] # noqa: E501 - replicas (int): [optional] # noqa: E501 - selector (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_volume_sink.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_volume_sink.py deleted file mode 100644 index 5c2851cee1cd..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_volume_sink.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_abstract_volume_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'abstract_volume_source': (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'abstract_volume_source': 'abstractVolumeSource', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - abstract_volume_source (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - abstract_volume_source (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_volume_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_volume_source.py deleted file mode 100644 index 585f38b9667d..000000000000 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_volume_source.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.duration import Duration - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_abstract_volume_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource - globals()['Duration'] = Duration - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource - - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'abstract_volume_source': (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource,), # noqa: E501 - 'concurrency': (int,), # noqa: E501 - 'poll_period': (Duration,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'abstract_volume_source': 'abstractVolumeSource', # noqa: E501 - 'concurrency': 'concurrency', # noqa: E501 - 'poll_period': 'pollPeriod', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - abstract_volume_source (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource): [optional] # noqa: E501 - concurrency (int): [optional] # noqa: E501 - poll_period (Duration): [optional] # noqa: E501 - read_only (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - abstract_volume_source (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource): [optional] # noqa: E501 - concurrency (int): [optional] # noqa: E501 - poll_period (Duration): [optional] # noqa: E501 - read_only (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_aws_credentials.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_art_gc_status.py similarity index 87% rename from sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_aws_credentials.py rename to sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_art_gc_status.py index b21a14c94fc9..7e703684e2f2 100644 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_aws_credentials.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_art_gc_status.py @@ -29,12 +29,8 @@ from argo_workflows.exceptions import ApiAttributeError -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials(ModelNormal): +class IoArgoprojWorkflowV1alpha1ArtGCStatus(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -70,7 +66,6 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - lazy_import() return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 _nullable = False @@ -85,11 +80,10 @@ def openapi_types(): openapi_types (dict): The key is attribute name and the value is attribute type. 
""" - lazy_import() return { - 'access_key_id': (SecretKeySelector,), # noqa: E501 - 'secret_access_key': (SecretKeySelector,), # noqa: E501 - 'session_token': (SecretKeySelector,), # noqa: E501 + 'not_specified': (bool,), # noqa: E501 + 'pods_recouped': ({str: (bool,)},), # noqa: E501 + 'strategies_processed': ({str: (bool,)},), # noqa: E501 } @cached_property @@ -98,9 +92,9 @@ def discriminator(): attribute_map = { - 'access_key_id': 'accessKeyId', # noqa: E501 - 'secret_access_key': 'secretAccessKey', # noqa: E501 - 'session_token': 'sessionToken', # noqa: E501 + 'not_specified': 'notSpecified', # noqa: E501 + 'pods_recouped': 'podsRecouped', # noqa: E501 + 'strategies_processed': 'strategiesProcessed', # noqa: E501 } read_only_vars = { @@ -111,7 +105,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1ArtGCStatus - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -144,9 +138,9 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - access_key_id (SecretKeySelector): [optional] # noqa: E501 - secret_access_key (SecretKeySelector): [optional] # noqa: E501 - session_token (SecretKeySelector): [optional] # noqa: E501 + not_specified (bool): if this is true, we already checked to see if we need to do it and we don't. [optional] # noqa: E501 + pods_recouped ({str: (bool,)}): have completed Pods been processed? (mapped by Pod name) used to prevent re-processing the Status of a Pod more than once. [optional] # noqa: E501 + strategies_processed ({str: (bool,)}): have Pods been started to perform this strategy? (enables us not to re-process what we've already done). 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -195,7 +189,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1ArtGCStatus - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -228,9 +222,9 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - access_key_id (SecretKeySelector): [optional] # noqa: E501 - secret_access_key (SecretKeySelector): [optional] # noqa: E501 - session_token (SecretKeySelector): [optional] # noqa: E501 + not_specified (bool): if this is true, we already checked to see if we need to do it and we don't. [optional] # noqa: E501 + pods_recouped ({str: (bool,)}): have completed Pods been processed? (mapped by Pod name) used to prevent re-processing the Status of a Pod more than once. [optional] # noqa: E501 + strategies_processed ({str: (bool,)}): have Pods been started to perform this strategy? (enables us not to re-process what we've already done). 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact.py index 5363fe1e7187..3f9726d44009 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact.py @@ -31,7 +31,9 @@ def lazy_import(): from argo_workflows.model.io_argoproj_workflow_v1alpha1_archive_strategy import IoArgoprojWorkflowV1alpha1ArchiveStrategy + from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_gc import IoArgoprojWorkflowV1alpha1ArtifactGC from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifactory_artifact import IoArgoprojWorkflowV1alpha1ArtifactoryArtifact + from argo_workflows.model.io_argoproj_workflow_v1alpha1_azure_artifact import IoArgoprojWorkflowV1alpha1AzureArtifact from argo_workflows.model.io_argoproj_workflow_v1alpha1_gcs_artifact import IoArgoprojWorkflowV1alpha1GCSArtifact from argo_workflows.model.io_argoproj_workflow_v1alpha1_git_artifact import IoArgoprojWorkflowV1alpha1GitArtifact from argo_workflows.model.io_argoproj_workflow_v1alpha1_hdfs_artifact import IoArgoprojWorkflowV1alpha1HDFSArtifact @@ -40,7 +42,9 @@ def lazy_import(): from argo_workflows.model.io_argoproj_workflow_v1alpha1_raw_artifact import IoArgoprojWorkflowV1alpha1RawArtifact from argo_workflows.model.io_argoproj_workflow_v1alpha1_s3_artifact import IoArgoprojWorkflowV1alpha1S3Artifact globals()['IoArgoprojWorkflowV1alpha1ArchiveStrategy'] = IoArgoprojWorkflowV1alpha1ArchiveStrategy + globals()['IoArgoprojWorkflowV1alpha1ArtifactGC'] = IoArgoprojWorkflowV1alpha1ArtifactGC globals()['IoArgoprojWorkflowV1alpha1ArtifactoryArtifact'] = IoArgoprojWorkflowV1alpha1ArtifactoryArtifact + globals()['IoArgoprojWorkflowV1alpha1AzureArtifact'] = IoArgoprojWorkflowV1alpha1AzureArtifact 
globals()['IoArgoprojWorkflowV1alpha1GCSArtifact'] = IoArgoprojWorkflowV1alpha1GCSArtifact globals()['IoArgoprojWorkflowV1alpha1GitArtifact'] = IoArgoprojWorkflowV1alpha1GitArtifact globals()['IoArgoprojWorkflowV1alpha1HDFSArtifact'] = IoArgoprojWorkflowV1alpha1HDFSArtifact @@ -106,7 +110,10 @@ def openapi_types(): 'name': (str,), # noqa: E501 'archive': (IoArgoprojWorkflowV1alpha1ArchiveStrategy,), # noqa: E501 'archive_logs': (bool,), # noqa: E501 + 'artifact_gc': (IoArgoprojWorkflowV1alpha1ArtifactGC,), # noqa: E501 'artifactory': (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact,), # noqa: E501 + 'azure': (IoArgoprojWorkflowV1alpha1AzureArtifact,), # noqa: E501 + 'deleted': (bool,), # noqa: E501 '_from': (str,), # noqa: E501 'from_expression': (str,), # noqa: E501 'gcs': (IoArgoprojWorkflowV1alpha1GCSArtifact,), # noqa: E501 @@ -133,7 +140,10 @@ def discriminator(): 'name': 'name', # noqa: E501 'archive': 'archive', # noqa: E501 'archive_logs': 'archiveLogs', # noqa: E501 + 'artifact_gc': 'artifactGC', # noqa: E501 'artifactory': 'artifactory', # noqa: E501 + 'azure': 'azure', # noqa: E501 + 'deleted': 'deleted', # noqa: E501 '_from': 'from', # noqa: E501 'from_expression': 'fromExpression', # noqa: E501 'gcs': 'gcs', # noqa: E501 @@ -197,7 +207,10 @@ def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) archive (IoArgoprojWorkflowV1alpha1ArchiveStrategy): [optional] # noqa: E501 archive_logs (bool): ArchiveLogs indicates if the container logs should be archived. [optional] # noqa: E501 + artifact_gc (IoArgoprojWorkflowV1alpha1ArtifactGC): [optional] # noqa: E501 artifactory (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact): [optional] # noqa: E501 + azure (IoArgoprojWorkflowV1alpha1AzureArtifact): [optional] # noqa: E501 + deleted (bool): Has this been deleted?. [optional] # noqa: E501 _from (str): From allows an artifact to reference an artifact from a previous step. 
[optional] # noqa: E501 from_expression (str): FromExpression, if defined, is evaluated to specify the value for the artifact. [optional] # noqa: E501 gcs (IoArgoprojWorkflowV1alpha1GCSArtifact): [optional] # noqa: E501 @@ -300,7 +313,10 @@ def __init__(self, name, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) archive (IoArgoprojWorkflowV1alpha1ArchiveStrategy): [optional] # noqa: E501 archive_logs (bool): ArchiveLogs indicates if the container logs should be archived. [optional] # noqa: E501 + artifact_gc (IoArgoprojWorkflowV1alpha1ArtifactGC): [optional] # noqa: E501 artifactory (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact): [optional] # noqa: E501 + azure (IoArgoprojWorkflowV1alpha1AzureArtifact): [optional] # noqa: E501 + deleted (bool): Has this been deleted?. [optional] # noqa: E501 _from (str): From allows an artifact to reference an artifact from a previous step. [optional] # noqa: E501 from_expression (str): FromExpression, if defined, is evaluated to specify the value for the artifact. 
[optional] # noqa: E501 gcs (IoArgoprojWorkflowV1alpha1GCSArtifact): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/stream_result_of_pipeline_step_watch_event.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc.py similarity index 88% rename from sdks/python/client/argo_workflows/model/stream_result_of_pipeline_step_watch_event.py rename to sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc.py index 11d43dab37c0..d1c89417d379 100644 --- a/sdks/python/client/argo_workflows/model/stream_result_of_pipeline_step_watch_event.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc.py @@ -30,13 +30,11 @@ def lazy_import(): - from argo_workflows.model.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError - from argo_workflows.model.pipeline_step_watch_event import PipelineStepWatchEvent - globals()['GrpcGatewayRuntimeStreamError'] = GrpcGatewayRuntimeStreamError - globals()['PipelineStepWatchEvent'] = PipelineStepWatchEvent + from argo_workflows.model.io_argoproj_workflow_v1alpha1_metadata import IoArgoprojWorkflowV1alpha1Metadata + globals()['IoArgoprojWorkflowV1alpha1Metadata'] = IoArgoprojWorkflowV1alpha1Metadata -class StreamResultOfPipelineStepWatchEvent(ModelNormal): +class IoArgoprojWorkflowV1alpha1ArtifactGC(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -89,8 +87,9 @@ def openapi_types(): """ lazy_import() return { - 'error': (GrpcGatewayRuntimeStreamError,), # noqa: E501 - 'result': (PipelineStepWatchEvent,), # noqa: E501 + 'pod_metadata': (IoArgoprojWorkflowV1alpha1Metadata,), # noqa: E501 + 'service_account_name': (str,), # noqa: E501 + 'strategy': (str,), # noqa: E501 } @cached_property @@ -99,8 +98,9 @@ def discriminator(): attribute_map = { - 'error': 'error', # noqa: E501 - 'result': 'result', # noqa: E501 + 'pod_metadata': 'podMetadata', # noqa: E501 + 'service_account_name': 'serviceAccountName', # noqa: E501 + 'strategy': 'strategy', # noqa: E501 } read_only_vars = { @@ -111,7 +111,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """StreamResultOfPipelineStepWatchEvent - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1ArtifactGC - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -144,8 +144,9 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (PipelineStepWatchEvent): [optional] # noqa: E501 + pod_metadata (IoArgoprojWorkflowV1alpha1Metadata): [optional] # noqa: E501 + service_account_name (str): ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion. [optional] # noqa: E501 + strategy (str): Strategy is the strategy to use.. 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -194,7 +195,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """StreamResultOfPipelineStepWatchEvent - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1ArtifactGC - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -227,8 +228,9 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (PipelineStepWatchEvent): [optional] # noqa: E501 + pod_metadata (IoArgoprojWorkflowV1alpha1Metadata): [optional] # noqa: E501 + service_account_name (str): ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion. [optional] # noqa: E501 + strategy (str): Strategy is the strategy to use.. 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_expand.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc_spec.py similarity index 91% rename from sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_expand.py rename to sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc_spec.py index e7696e76b21d..00776b1d6f01 100644 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_expand.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc_spec.py @@ -30,11 +30,11 @@ def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_abstract_step import GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep + from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_node_spec import IoArgoprojWorkflowV1alpha1ArtifactNodeSpec + globals()['IoArgoprojWorkflowV1alpha1ArtifactNodeSpec'] = IoArgoprojWorkflowV1alpha1ArtifactNodeSpec -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand(ModelNormal): +class IoArgoprojWorkflowV1alpha1ArtifactGCSpec(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -87,7 +87,7 @@ def openapi_types(): """ lazy_import() return { - 'abstract_step': (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep,), # noqa: E501 + 'artifacts_by_node': ({str: (IoArgoprojWorkflowV1alpha1ArtifactNodeSpec,)},), # noqa: E501 } @cached_property @@ -96,7 +96,7 @@ def discriminator(): attribute_map = { - 'abstract_step': 'abstractStep', # noqa: E501 + 'artifacts_by_node': 'artifactsByNode', # noqa: E501 } read_only_vars = { @@ -107,7 +107,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1ArtifactGCSpec - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -140,7 +140,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - abstract_step (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep): [optional] # noqa: E501 + artifacts_by_node ({str: (IoArgoprojWorkflowV1alpha1ArtifactNodeSpec,)}): ArtifactsByNode maps Node name to information pertaining to Artifacts on that Node. 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -189,7 +189,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1ArtifactGCSpec - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -222,7 +222,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - abstract_step (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep): [optional] # noqa: E501 + artifacts_by_node ({str: (IoArgoprojWorkflowV1alpha1ArtifactNodeSpec,)}): ArtifactsByNode maps Node name to information pertaining to Artifacts on that Node. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_cat.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc_status.py similarity index 91% rename from sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_cat.py rename to sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc_status.py index 6fb726dda33b..807265665677 100644 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_cat.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc_status.py @@ -30,11 +30,11 @@ def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_abstract_step import GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep'] = 
GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep + from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_result_node_status import IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus + globals()['IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus'] = IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat(ModelNormal): +class IoArgoprojWorkflowV1alpha1ArtifactGCStatus(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -87,7 +87,7 @@ def openapi_types(): """ lazy_import() return { - 'abstract_step': (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep,), # noqa: E501 + 'artifact_results_by_node': ({str: (IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus,)},), # noqa: E501 } @cached_property @@ -96,7 +96,7 @@ def discriminator(): attribute_map = { - 'abstract_step': 'abstractStep', # noqa: E501 + 'artifact_results_by_node': 'artifactResultsByNode', # noqa: E501 } read_only_vars = { @@ -107,7 +107,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1ArtifactGCStatus - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -140,7 +140,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - abstract_step (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep): [optional] # noqa: E501 + artifact_results_by_node ({str: (IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus,)}): ArtifactResultsByNode maps Node name to result. 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -189,7 +189,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1ArtifactGCStatus - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -222,7 +222,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - abstract_step (GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep): [optional] # noqa: E501 + artifact_results_by_node ({str: (IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus,)}): ArtifactResultsByNode maps Node name to result. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_location.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_location.py index c6441c6264c7..45d79555e0e0 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_location.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_location.py @@ -31,6 +31,7 @@ def lazy_import(): from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifactory_artifact import IoArgoprojWorkflowV1alpha1ArtifactoryArtifact + from argo_workflows.model.io_argoproj_workflow_v1alpha1_azure_artifact import IoArgoprojWorkflowV1alpha1AzureArtifact from argo_workflows.model.io_argoproj_workflow_v1alpha1_gcs_artifact import IoArgoprojWorkflowV1alpha1GCSArtifact from argo_workflows.model.io_argoproj_workflow_v1alpha1_git_artifact import IoArgoprojWorkflowV1alpha1GitArtifact from 
argo_workflows.model.io_argoproj_workflow_v1alpha1_hdfs_artifact import IoArgoprojWorkflowV1alpha1HDFSArtifact @@ -39,6 +40,7 @@ def lazy_import(): from argo_workflows.model.io_argoproj_workflow_v1alpha1_raw_artifact import IoArgoprojWorkflowV1alpha1RawArtifact from argo_workflows.model.io_argoproj_workflow_v1alpha1_s3_artifact import IoArgoprojWorkflowV1alpha1S3Artifact globals()['IoArgoprojWorkflowV1alpha1ArtifactoryArtifact'] = IoArgoprojWorkflowV1alpha1ArtifactoryArtifact + globals()['IoArgoprojWorkflowV1alpha1AzureArtifact'] = IoArgoprojWorkflowV1alpha1AzureArtifact globals()['IoArgoprojWorkflowV1alpha1GCSArtifact'] = IoArgoprojWorkflowV1alpha1GCSArtifact globals()['IoArgoprojWorkflowV1alpha1GitArtifact'] = IoArgoprojWorkflowV1alpha1GitArtifact globals()['IoArgoprojWorkflowV1alpha1HDFSArtifact'] = IoArgoprojWorkflowV1alpha1HDFSArtifact @@ -103,6 +105,7 @@ def openapi_types(): return { 'archive_logs': (bool,), # noqa: E501 'artifactory': (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact,), # noqa: E501 + 'azure': (IoArgoprojWorkflowV1alpha1AzureArtifact,), # noqa: E501 'gcs': (IoArgoprojWorkflowV1alpha1GCSArtifact,), # noqa: E501 'git': (IoArgoprojWorkflowV1alpha1GitArtifact,), # noqa: E501 'hdfs': (IoArgoprojWorkflowV1alpha1HDFSArtifact,), # noqa: E501 @@ -120,6 +123,7 @@ def discriminator(): attribute_map = { 'archive_logs': 'archiveLogs', # noqa: E501 'artifactory': 'artifactory', # noqa: E501 + 'azure': 'azure', # noqa: E501 'gcs': 'gcs', # noqa: E501 'git': 'git', # noqa: E501 'hdfs': 'hdfs', # noqa: E501 @@ -172,6 +176,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) archive_logs (bool): ArchiveLogs indicates if the container logs should be archived. 
[optional] # noqa: E501 artifactory (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact): [optional] # noqa: E501 + azure (IoArgoprojWorkflowV1alpha1AzureArtifact): [optional] # noqa: E501 gcs (IoArgoprojWorkflowV1alpha1GCSArtifact): [optional] # noqa: E501 git (IoArgoprojWorkflowV1alpha1GitArtifact): [optional] # noqa: E501 hdfs (IoArgoprojWorkflowV1alpha1HDFSArtifact): [optional] # noqa: E501 @@ -262,6 +267,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) archive_logs (bool): ArchiveLogs indicates if the container logs should be archived. [optional] # noqa: E501 artifactory (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact): [optional] # noqa: E501 + azure (IoArgoprojWorkflowV1alpha1AzureArtifact): [optional] # noqa: E501 gcs (IoArgoprojWorkflowV1alpha1GCSArtifact): [optional] # noqa: E501 git (IoArgoprojWorkflowV1alpha1GitArtifact): [optional] # noqa: E501 hdfs (IoArgoprojWorkflowV1alpha1HDFSArtifact): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_backoff.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_node_spec.py similarity index 88% rename from sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_backoff.py rename to sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_node_spec.py index 47fd75902f80..b1d45b3c7812 100644 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_backoff.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_node_spec.py @@ -30,11 +30,13 @@ def lazy_import(): - from argo_workflows.model.duration import Duration - globals()['Duration'] = Duration + from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact import IoArgoprojWorkflowV1alpha1Artifact + from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_location import 
IoArgoprojWorkflowV1alpha1ArtifactLocation + globals()['IoArgoprojWorkflowV1alpha1Artifact'] = IoArgoprojWorkflowV1alpha1Artifact + globals()['IoArgoprojWorkflowV1alpha1ArtifactLocation'] = IoArgoprojWorkflowV1alpha1ArtifactLocation -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff(ModelNormal): +class IoArgoprojWorkflowV1alpha1ArtifactNodeSpec(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -87,11 +89,8 @@ def openapi_types(): """ lazy_import() return { - 'factor_percentage': (int,), # noqa: E501 - 'cap': (Duration,), # noqa: E501 - 'duration': (Duration,), # noqa: E501 - 'jitter_percentage': (int,), # noqa: E501 - 'steps': (str,), # noqa: E501 + 'archive_location': (IoArgoprojWorkflowV1alpha1ArtifactLocation,), # noqa: E501 + 'artifacts': ({str: (IoArgoprojWorkflowV1alpha1Artifact,)},), # noqa: E501 } @cached_property @@ -100,11 +99,8 @@ def discriminator(): attribute_map = { - 'factor_percentage': 'FactorPercentage', # noqa: E501 - 'cap': 'cap', # noqa: E501 - 'duration': 'duration', # noqa: E501 - 'jitter_percentage': 'jitterPercentage', # noqa: E501 - 'steps': 'steps', # noqa: E501 + 'archive_location': 'archiveLocation', # noqa: E501 + 'artifacts': 'artifacts', # noqa: E501 } read_only_vars = { @@ -115,7 +111,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1ArtifactNodeSpec - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -148,11 +144,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - factor_percentage (int): [optional] # noqa: E501 - cap (Duration): [optional] # noqa: E501 - duration 
(Duration): [optional] # noqa: E501 - jitter_percentage (int): [optional] # noqa: E501 - steps (str): [optional] # noqa: E501 + archive_location (IoArgoprojWorkflowV1alpha1ArtifactLocation): [optional] # noqa: E501 + artifacts ({str: (IoArgoprojWorkflowV1alpha1Artifact,)}): Artifacts maps artifact name to Artifact description. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -201,7 +194,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1ArtifactNodeSpec - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -234,11 +227,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - factor_percentage (int): [optional] # noqa: E501 - cap (Duration): [optional] # noqa: E501 - duration (Duration): [optional] # noqa: E501 - jitter_percentage (int): [optional] # noqa: E501 - steps (str): [optional] # noqa: E501 + archive_location (IoArgoprojWorkflowV1alpha1ArtifactLocation): [optional] # noqa: E501 + artifacts ({str: (IoArgoprojWorkflowV1alpha1Artifact,)}): Artifacts maps artifact name to Artifact description. 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_paths.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_paths.py index 8d3dd0c7ef7d..6589c616d312 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_paths.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_paths.py @@ -31,7 +31,9 @@ def lazy_import(): from argo_workflows.model.io_argoproj_workflow_v1alpha1_archive_strategy import IoArgoprojWorkflowV1alpha1ArchiveStrategy + from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_gc import IoArgoprojWorkflowV1alpha1ArtifactGC from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifactory_artifact import IoArgoprojWorkflowV1alpha1ArtifactoryArtifact + from argo_workflows.model.io_argoproj_workflow_v1alpha1_azure_artifact import IoArgoprojWorkflowV1alpha1AzureArtifact from argo_workflows.model.io_argoproj_workflow_v1alpha1_gcs_artifact import IoArgoprojWorkflowV1alpha1GCSArtifact from argo_workflows.model.io_argoproj_workflow_v1alpha1_git_artifact import IoArgoprojWorkflowV1alpha1GitArtifact from argo_workflows.model.io_argoproj_workflow_v1alpha1_hdfs_artifact import IoArgoprojWorkflowV1alpha1HDFSArtifact @@ -40,7 +42,9 @@ def lazy_import(): from argo_workflows.model.io_argoproj_workflow_v1alpha1_raw_artifact import IoArgoprojWorkflowV1alpha1RawArtifact from argo_workflows.model.io_argoproj_workflow_v1alpha1_s3_artifact import IoArgoprojWorkflowV1alpha1S3Artifact globals()['IoArgoprojWorkflowV1alpha1ArchiveStrategy'] = IoArgoprojWorkflowV1alpha1ArchiveStrategy + globals()['IoArgoprojWorkflowV1alpha1ArtifactGC'] = IoArgoprojWorkflowV1alpha1ArtifactGC globals()['IoArgoprojWorkflowV1alpha1ArtifactoryArtifact'] = IoArgoprojWorkflowV1alpha1ArtifactoryArtifact + globals()['IoArgoprojWorkflowV1alpha1AzureArtifact'] = 
IoArgoprojWorkflowV1alpha1AzureArtifact globals()['IoArgoprojWorkflowV1alpha1GCSArtifact'] = IoArgoprojWorkflowV1alpha1GCSArtifact globals()['IoArgoprojWorkflowV1alpha1GitArtifact'] = IoArgoprojWorkflowV1alpha1GitArtifact globals()['IoArgoprojWorkflowV1alpha1HDFSArtifact'] = IoArgoprojWorkflowV1alpha1HDFSArtifact @@ -106,7 +110,10 @@ def openapi_types(): 'name': (str,), # noqa: E501 'archive': (IoArgoprojWorkflowV1alpha1ArchiveStrategy,), # noqa: E501 'archive_logs': (bool,), # noqa: E501 + 'artifact_gc': (IoArgoprojWorkflowV1alpha1ArtifactGC,), # noqa: E501 'artifactory': (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact,), # noqa: E501 + 'azure': (IoArgoprojWorkflowV1alpha1AzureArtifact,), # noqa: E501 + 'deleted': (bool,), # noqa: E501 '_from': (str,), # noqa: E501 'from_expression': (str,), # noqa: E501 'gcs': (IoArgoprojWorkflowV1alpha1GCSArtifact,), # noqa: E501 @@ -133,7 +140,10 @@ def discriminator(): 'name': 'name', # noqa: E501 'archive': 'archive', # noqa: E501 'archive_logs': 'archiveLogs', # noqa: E501 + 'artifact_gc': 'artifactGC', # noqa: E501 'artifactory': 'artifactory', # noqa: E501 + 'azure': 'azure', # noqa: E501 + 'deleted': 'deleted', # noqa: E501 '_from': 'from', # noqa: E501 'from_expression': 'fromExpression', # noqa: E501 'gcs': 'gcs', # noqa: E501 @@ -197,7 +207,10 @@ def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) archive (IoArgoprojWorkflowV1alpha1ArchiveStrategy): [optional] # noqa: E501 archive_logs (bool): ArchiveLogs indicates if the container logs should be archived. [optional] # noqa: E501 + artifact_gc (IoArgoprojWorkflowV1alpha1ArtifactGC): [optional] # noqa: E501 artifactory (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact): [optional] # noqa: E501 + azure (IoArgoprojWorkflowV1alpha1AzureArtifact): [optional] # noqa: E501 + deleted (bool): Has this been deleted?. [optional] # noqa: E501 _from (str): From allows an artifact to reference an artifact from a previous step. 
[optional] # noqa: E501 from_expression (str): FromExpression, if defined, is evaluated to specify the value for the artifact. [optional] # noqa: E501 gcs (IoArgoprojWorkflowV1alpha1GCSArtifact): [optional] # noqa: E501 @@ -300,7 +313,10 @@ def __init__(self, name, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) archive (IoArgoprojWorkflowV1alpha1ArchiveStrategy): [optional] # noqa: E501 archive_logs (bool): ArchiveLogs indicates if the container logs should be archived. [optional] # noqa: E501 + artifact_gc (IoArgoprojWorkflowV1alpha1ArtifactGC): [optional] # noqa: E501 artifactory (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact): [optional] # noqa: E501 + azure (IoArgoprojWorkflowV1alpha1AzureArtifact): [optional] # noqa: E501 + deleted (bool): Has this been deleted?. [optional] # noqa: E501 _from (str): From allows an artifact to reference an artifact from a previous step. [optional] # noqa: E501 from_expression (str): FromExpression, if defined, is evaluated to specify the value for the artifact. 
[optional] # noqa: E501 gcs (IoArgoprojWorkflowV1alpha1GCSArtifact): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_repository.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_repository.py index 0c16795c7b59..28db4518e355 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_repository.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_repository.py @@ -31,11 +31,13 @@ def lazy_import(): from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifactory_artifact_repository import IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository + from argo_workflows.model.io_argoproj_workflow_v1alpha1_azure_artifact_repository import IoArgoprojWorkflowV1alpha1AzureArtifactRepository from argo_workflows.model.io_argoproj_workflow_v1alpha1_gcs_artifact_repository import IoArgoprojWorkflowV1alpha1GCSArtifactRepository from argo_workflows.model.io_argoproj_workflow_v1alpha1_hdfs_artifact_repository import IoArgoprojWorkflowV1alpha1HDFSArtifactRepository from argo_workflows.model.io_argoproj_workflow_v1alpha1_oss_artifact_repository import IoArgoprojWorkflowV1alpha1OSSArtifactRepository from argo_workflows.model.io_argoproj_workflow_v1alpha1_s3_artifact_repository import IoArgoprojWorkflowV1alpha1S3ArtifactRepository globals()['IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository'] = IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository + globals()['IoArgoprojWorkflowV1alpha1AzureArtifactRepository'] = IoArgoprojWorkflowV1alpha1AzureArtifactRepository globals()['IoArgoprojWorkflowV1alpha1GCSArtifactRepository'] = IoArgoprojWorkflowV1alpha1GCSArtifactRepository globals()['IoArgoprojWorkflowV1alpha1HDFSArtifactRepository'] = IoArgoprojWorkflowV1alpha1HDFSArtifactRepository globals()['IoArgoprojWorkflowV1alpha1OSSArtifactRepository'] = IoArgoprojWorkflowV1alpha1OSSArtifactRepository @@ -97,6 +99,7 @@ 
def openapi_types(): return { 'archive_logs': (bool,), # noqa: E501 'artifactory': (IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository,), # noqa: E501 + 'azure': (IoArgoprojWorkflowV1alpha1AzureArtifactRepository,), # noqa: E501 'gcs': (IoArgoprojWorkflowV1alpha1GCSArtifactRepository,), # noqa: E501 'hdfs': (IoArgoprojWorkflowV1alpha1HDFSArtifactRepository,), # noqa: E501 'oss': (IoArgoprojWorkflowV1alpha1OSSArtifactRepository,), # noqa: E501 @@ -111,6 +114,7 @@ def discriminator(): attribute_map = { 'archive_logs': 'archiveLogs', # noqa: E501 'artifactory': 'artifactory', # noqa: E501 + 'azure': 'azure', # noqa: E501 'gcs': 'gcs', # noqa: E501 'hdfs': 'hdfs', # noqa: E501 'oss': 'oss', # noqa: E501 @@ -160,6 +164,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) archive_logs (bool): ArchiveLogs enables log archiving. [optional] # noqa: E501 artifactory (IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository): [optional] # noqa: E501 + azure (IoArgoprojWorkflowV1alpha1AzureArtifactRepository): [optional] # noqa: E501 gcs (IoArgoprojWorkflowV1alpha1GCSArtifactRepository): [optional] # noqa: E501 hdfs (IoArgoprojWorkflowV1alpha1HDFSArtifactRepository): [optional] # noqa: E501 oss (IoArgoprojWorkflowV1alpha1OSSArtifactRepository): [optional] # noqa: E501 @@ -247,6 +252,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) archive_logs (bool): ArchiveLogs enables log archiving. 
[optional] # noqa: E501 artifactory (IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository): [optional] # noqa: E501 + azure (IoArgoprojWorkflowV1alpha1AzureArtifactRepository): [optional] # noqa: E501 gcs (IoArgoprojWorkflowV1alpha1GCSArtifactRepository): [optional] # noqa: E501 hdfs (IoArgoprojWorkflowV1alpha1HDFSArtifactRepository): [optional] # noqa: E501 oss (IoArgoprojWorkflowV1alpha1OSSArtifactRepository): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_storage.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_result.py similarity index 90% rename from sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_storage.py rename to sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_result.py index 003aa3eee9b8..54348a36e8ae 100644 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_storage.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_result.py @@ -30,7 +30,7 @@ -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage(ModelNormal): +class IoArgoprojWorkflowV1alpha1ArtifactResult(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -82,7 +82,8 @@ def openapi_types(): """ return { 'name': (str,), # noqa: E501 - 'sub_path': (str,), # noqa: E501 + 'error': (str,), # noqa: E501 + 'success': (bool,), # noqa: E501 } @cached_property @@ -92,7 +93,8 @@ def discriminator(): attribute_map = { 'name': 'name', # noqa: E501 - 'sub_path': 'subPath', # noqa: E501 + 'error': 'error', # noqa: E501 + 'success': 'success', # noqa: E501 } read_only_vars = { @@ -102,8 +104,11 @@ def discriminator(): @classmethod @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage - a model defined in OpenAPI + def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 + """IoArgoprojWorkflowV1alpha1ArtifactResult - a model defined in OpenAPI + + Args: + name (str): Name is the name of the Artifact Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -136,8 +141,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - sub_path (str): [optional] # noqa: E501 + error (str): Error is an optional error message which should be set if Success==false. [optional] # noqa: E501 + success (bool): Success describes whether the deletion succeeded. 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -165,6 +170,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.name = name for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ @@ -185,8 +191,11 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 ]) @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage - a model defined in OpenAPI + def __init__(self, name, *args, **kwargs): # noqa: E501 + """IoArgoprojWorkflowV1alpha1ArtifactResult - a model defined in OpenAPI + + Args: + name (str): Name is the name of the Artifact Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -219,8 +228,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - sub_path (str): [optional] # noqa: E501 + error (str): Error is an optional error message which should be set if Success==false. [optional] # noqa: E501 + success (bool): Success describes whether the deletion succeeded. 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -246,6 +255,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.name = name for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_s3_sink.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_result_node_status.py similarity index 92% rename from sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_s3_sink.py rename to sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_result_node_status.py index fc0f8b927c74..b28bf80a6bf1 100644 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_s3_sink.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_result_node_status.py @@ -30,11 +30,11 @@ def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_s3 import GithubComArgoprojLabsArgoDataflowApiV1alpha1S3 - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1S3'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1S3 + from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_result import IoArgoprojWorkflowV1alpha1ArtifactResult + globals()['IoArgoprojWorkflowV1alpha1ArtifactResult'] = IoArgoprojWorkflowV1alpha1ArtifactResult -class GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink(ModelNormal): +class IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -87,7 +87,7 @@ def openapi_types(): """ lazy_import() return { - 's3': (GithubComArgoprojLabsArgoDataflowApiV1alpha1S3,), # noqa: E501 + 'artifact_results': ({str: (IoArgoprojWorkflowV1alpha1ArtifactResult,)},), # noqa: E501 } @cached_property @@ -96,7 +96,7 @@ def discriminator(): attribute_map = { - 's3': 's3', # noqa: E501 + 'artifact_results': 'artifactResults', # noqa: E501 } read_only_vars = { @@ -107,7 +107,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -140,7 +140,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - s3 (GithubComArgoprojLabsArgoDataflowApiV1alpha1S3): [optional] # noqa: E501 + artifact_results ({str: (IoArgoprojWorkflowV1alpha1ArtifactResult,)}): ArtifactResults maps Artifact name to result of the deletion. 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -189,7 +189,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -222,7 +222,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - s3 (GithubComArgoprojLabsArgoDataflowApiV1alpha1S3): [optional] # noqa: E501 + artifact_results ({str: (IoArgoprojWorkflowV1alpha1ArtifactResult,)}): ArtifactResults maps Artifact name to result of the deletion. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_tls.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_azure_artifact.py similarity index 83% rename from sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_tls.py rename to sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_azure_artifact.py index c1638183f623..0c9f2fc6a238 100644 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_tls.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_azure_artifact.py @@ -34,7 +34,7 @@ def lazy_import(): globals()['SecretKeySelector'] = SecretKeySelector -class GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS(ModelNormal): +class IoArgoprojWorkflowV1alpha1AzureArtifact(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -87,9 +87,11 @@ def openapi_types(): """ lazy_import() return { - 'ca_cert_secret': (SecretKeySelector,), # noqa: E501 - 'cert_secret': (SecretKeySelector,), # noqa: E501 - 'key_secret': (SecretKeySelector,), # noqa: E501 + 'blob': (str,), # noqa: E501 + 'container': (str,), # noqa: E501 + 'endpoint': (str,), # noqa: E501 + 'account_key_secret': (SecretKeySelector,), # noqa: E501 + 'use_sdk_creds': (bool,), # noqa: E501 } @cached_property @@ -98,9 +100,11 @@ def discriminator(): attribute_map = { - 'ca_cert_secret': 'caCertSecret', # noqa: E501 - 'cert_secret': 'certSecret', # noqa: E501 - 'key_secret': 'keySecret', # noqa: E501 + 'blob': 'blob', # noqa: E501 + 'container': 'container', # noqa: E501 + 'endpoint': 'endpoint', # noqa: E501 + 'account_key_secret': 'accountKeySecret', # noqa: E501 + 'use_sdk_creds': 'useSDKCreds', # noqa: E501 } read_only_vars = { @@ -110,8 +114,13 @@ def discriminator(): @classmethod @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS - a model defined in OpenAPI + def _from_openapi_data(cls, blob, container, endpoint, *args, **kwargs): # noqa: E501 + """IoArgoprojWorkflowV1alpha1AzureArtifact - a model defined in OpenAPI + + Args: + blob (str): Blob is the blob name (i.e., path) in the container where the artifact resides + container (str): Container is the container where resources will be stored + endpoint (str): Endpoint is the service url associated with an account. 
It is most likely \"https://.blob.core.windows.net\" Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -144,9 +153,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - ca_cert_secret (SecretKeySelector): [optional] # noqa: E501 - cert_secret (SecretKeySelector): [optional] # noqa: E501 - key_secret (SecretKeySelector): [optional] # noqa: E501 + account_key_secret (SecretKeySelector): [optional] # noqa: E501 + use_sdk_creds (bool): UseSDKCreds tells the driver to figure out credentials based on sdk defaults.. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -174,6 +182,9 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.blob = blob + self.container = container + self.endpoint = endpoint for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ @@ -194,8 +205,13 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 ]) @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS - a model defined in OpenAPI + def __init__(self, blob, container, endpoint, *args, **kwargs): # noqa: E501 + """IoArgoprojWorkflowV1alpha1AzureArtifact - a model defined in OpenAPI + + Args: + blob (str): Blob is the blob name (i.e., path) in the container where the artifact resides + container (str): Container is the container where resources will be stored + endpoint (str): Endpoint is the service url associated with an account. 
It is most likely \"https://.blob.core.windows.net\" Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -228,9 +244,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - ca_cert_secret (SecretKeySelector): [optional] # noqa: E501 - cert_secret (SecretKeySelector): [optional] # noqa: E501 - key_secret (SecretKeySelector): [optional] # noqa: E501 + account_key_secret (SecretKeySelector): [optional] # noqa: E501 + use_sdk_creds (bool): UseSDKCreds tells the driver to figure out credentials based on sdk defaults.. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -256,6 +271,9 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.blob = blob + self.container = container + self.endpoint = endpoint for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_azure_artifact_repository.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_azure_artifact_repository.py new file mode 100644 index 000000000000..9d78245266fa --- /dev/null +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_azure_artifact_repository.py @@ -0,0 +1,285 @@ +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 + + The version of the OpenAPI document: VERSION + Generated by: https://openapi-generator.tech +""" + + +import re # noqa: F401 +import sys # noqa: F401 + +from argo_workflows.model_utils import ( # noqa: F401 + ApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from ..model_utils import OpenApiModel +from argo_workflows.exceptions import ApiAttributeError + + +def lazy_import(): + from argo_workflows.model.secret_key_selector import SecretKeySelector + globals()['SecretKeySelector'] = SecretKeySelector + + +class IoArgoprojWorkflowV1alpha1AzureArtifactRepository(ModelNormal): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + allowed_values = { + } + + validations = { + } + + @cached_property + def additional_properties_type(): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + + _nullable = False + + @cached_property + def openapi_types(): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return { + 'container': (str,), # noqa: E501 + 'endpoint': (str,), # noqa: E501 + 'account_key_secret': (SecretKeySelector,), # noqa: E501 + 'blob_name_format': (str,), # noqa: E501 + 'use_sdk_creds': (bool,), # noqa: E501 + } + + @cached_property + def discriminator(): + return None + + + attribute_map = { + 'container': 'container', # noqa: E501 + 'endpoint': 'endpoint', # noqa: E501 + 'account_key_secret': 'accountKeySecret', # noqa: E501 + 'blob_name_format': 'blobNameFormat', # noqa: E501 + 'use_sdk_creds': 'useSDKCreds', # noqa: E501 + } + + read_only_vars = { + } + + _composed_schemas = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls, container, endpoint, *args, **kwargs): # noqa: E501 + """IoArgoprojWorkflowV1alpha1AzureArtifactRepository - a model defined in OpenAPI + + Args: + container (str): Container is the container where resources will be stored + endpoint (str): Endpoint is the service url associated with an account. It is most likely \"https://.blob.core.windows.net\" + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. 
+ Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + account_key_secret (SecretKeySelector): [optional] # noqa: E501 + blob_name_format (str): BlobNameFormat is defines the format of how to store blob names. Can reference workflow variables. [optional] # noqa: E501 + use_sdk_creds (bool): UseSDKCreds tells the driver to figure out credentials based on sdk defaults.. 
[optional] # noqa: E501 + """ + + _check_type = kwargs.pop('_check_type', True) + _spec_property_naming = kwargs.pop('_spec_property_naming', False) + _path_to_item = kwargs.pop('_path_to_item', ()) + _configuration = kwargs.pop('_configuration', None) + _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise ApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + args, + self.__class__.__name__, + ), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.container = container + self.endpoint = endpoint + for var_name, var_value in kwargs.items(): + if var_name not in self.attribute_map and \ + self._configuration is not None and \ + self._configuration.discard_unknown_keys and \ + self.additional_properties_type is None: + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set([ + '_data_store', + '_check_type', + '_spec_property_naming', + '_path_to_item', + '_configuration', + '_visited_composed_classes', + ]) + + @convert_js_args_to_python_args + def __init__(self, container, endpoint, *args, **kwargs): # noqa: E501 + """IoArgoprojWorkflowV1alpha1AzureArtifactRepository - a model defined in OpenAPI + + Args: + container (str): Container is the container where resources will be stored + endpoint (str): Endpoint is the service url associated with an account. It is most likely \"https://.blob.core.windows.net\" + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. 
+ Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + account_key_secret (SecretKeySelector): [optional] # noqa: E501 + blob_name_format (str): BlobNameFormat is defines the format of how to store blob names. Can reference workflow variables. [optional] # noqa: E501 + use_sdk_creds (bool): UseSDKCreds tells the driver to figure out credentials based on sdk defaults.. [optional] # noqa: E501 + """ + + _check_type = kwargs.pop('_check_type', True) + _spec_property_naming = kwargs.pop('_spec_property_naming', False) + _path_to_item = kwargs.pop('_path_to_item', ()) + _configuration = kwargs.pop('_configuration', None) + _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + + if args: + raise ApiTypeError( + "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." % ( + args, + self.__class__.__name__, + ), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.container = container + self.endpoint = endpoint + for var_name, var_value in kwargs.items(): + if var_name not in self.attribute_map and \ + self._configuration is not None and \ + self._configuration.discard_unknown_keys and \ + self.additional_properties_type is None: + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_data_source_from.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_basic_auth.py similarity index 93% rename from sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_data_source_from.py rename to sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_basic_auth.py index bbda8678448f..afe6dadad3f2 100644 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_data_source_from.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_basic_auth.py @@ -34,7 +34,7 @@ def lazy_import(): globals()['SecretKeySelector'] = SecretKeySelector -class GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom(ModelNormal): +class IoArgoprojWorkflowV1alpha1BasicAuth(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -87,7 +87,8 @@ def openapi_types(): """ lazy_import() return { - 'secret_key_ref': (SecretKeySelector,), # noqa: E501 + 'password_secret': (SecretKeySelector,), # noqa: E501 + 'username_secret': (SecretKeySelector,), # noqa: E501 } @cached_property @@ -96,7 +97,8 @@ def discriminator(): attribute_map = { - 'secret_key_ref': 'secretKeyRef', # noqa: E501 + 'password_secret': 'passwordSecret', # noqa: E501 + 'username_secret': 'usernameSecret', # noqa: E501 } read_only_vars = { @@ -107,7 +109,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1BasicAuth - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -140,7 +142,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - secret_key_ref (SecretKeySelector): [optional] # noqa: E501 + password_secret (SecretKeySelector): [optional] # noqa: E501 + username_secret (SecretKeySelector): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -189,7 +192,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1BasicAuth - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -222,7 +225,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - secret_key_ref (SecretKeySelector): 
[optional] # noqa: E501 + password_secret (SecretKeySelector): [optional] # noqa: E501 + username_secret (SecretKeySelector): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_header_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_client_cert_auth.py similarity index 93% rename from sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_header_source.py rename to sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_client_cert_auth.py index 1f32166a2b34..9c96631c85bf 100644 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_header_source.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_client_cert_auth.py @@ -34,7 +34,7 @@ def lazy_import(): globals()['SecretKeySelector'] = SecretKeySelector -class GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource(ModelNormal): +class IoArgoprojWorkflowV1alpha1ClientCertAuth(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -87,7 +87,8 @@ def openapi_types(): """ lazy_import() return { - 'secret_key_ref': (SecretKeySelector,), # noqa: E501 + 'client_cert_secret': (SecretKeySelector,), # noqa: E501 + 'client_key_secret': (SecretKeySelector,), # noqa: E501 } @cached_property @@ -96,7 +97,8 @@ def discriminator(): attribute_map = { - 'secret_key_ref': 'secretKeyRef', # noqa: E501 + 'client_cert_secret': 'clientCertSecret', # noqa: E501 + 'client_key_secret': 'clientKeySecret', # noqa: E501 } read_only_vars = { @@ -107,7 +109,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1ClientCertAuth - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -140,7 +142,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - secret_key_ref (SecretKeySelector): [optional] # noqa: E501 + client_cert_secret (SecretKeySelector): [optional] # noqa: E501 + client_key_secret (SecretKeySelector): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -189,7 +192,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1ClientCertAuth - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -222,7 +225,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - 
secret_key_ref (SecretKeySelector): [optional] # noqa: E501 + client_cert_secret (SecretKeySelector): [optional] # noqa: E501 + client_key_secret (SecretKeySelector): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/duration.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_collect_event_request.py similarity index 96% rename from sdks/python/client/argo_workflows/model/duration.py rename to sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_collect_event_request.py index c609dfe4c762..1bccee3452c7 100644 --- a/sdks/python/client/argo_workflows/model/duration.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_collect_event_request.py @@ -30,7 +30,7 @@ -class Duration(ModelNormal): +class IoArgoprojWorkflowV1alpha1CollectEventRequest(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -81,7 +81,7 @@ def openapi_types(): and the value is attribute type. 
""" return { - 'duration': (str,), # noqa: E501 + 'name': (str,), # noqa: E501 } @cached_property @@ -90,7 +90,7 @@ def discriminator(): attribute_map = { - 'duration': 'duration', # noqa: E501 + 'name': 'name', # noqa: E501 } read_only_vars = { @@ -101,7 +101,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """Duration - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1CollectEventRequest - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -134,7 +134,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - duration (str): [optional] # noqa: E501 + name (str): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -183,7 +183,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """Duration - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1CollectEventRequest - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -216,7 +216,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - duration (str): [optional] # noqa: E501 + name (str): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_container_node.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_container_node.py index 0a7a4c40e7f3..aabd74c7823e 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_container_node.py +++ 
b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_container_node.py @@ -203,12 +203,12 @@ def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - args ([str]): Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - command ([str]): Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 + args ([str]): Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. 
Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 + command ([str]): Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 dependencies ([str]): [optional] # noqa: E501 env ([EnvVar]): List of environment variables to set in the container. Cannot be updated.. [optional] # noqa: E501 env_from ([EnvFromSource]): List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.. [optional] # noqa: E501 - image (str): Docker image name. 
More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.. [optional] # noqa: E501 + image (str): Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.. [optional] # noqa: E501 image_pull_policy (str): Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images. [optional] # noqa: E501 lifecycle (Lifecycle): [optional] # noqa: E501 liveness_probe (Probe): [optional] # noqa: E501 @@ -310,12 +310,12 @@ def __init__(self, name, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - args ([str]): Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - command ([str]): Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. 
Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 + args ([str]): Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 + command ([str]): Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. 
[optional] # noqa: E501 dependencies ([str]): [optional] # noqa: E501 env ([EnvVar]): List of environment variables to set in the container. Cannot be updated.. [optional] # noqa: E501 env_from ([EnvFromSource]): List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.. [optional] # noqa: E501 - image (str): Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.. [optional] # noqa: E501 + image (str): Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.. [optional] # noqa: E501 image_pull_policy (str): Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images. 
[optional] # noqa: E501 lifecycle (Lifecycle): [optional] # noqa: E501 liveness_probe (Probe): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_get_user_info_response.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_get_user_info_response.py index 27390ee657ef..5e82526e7bdd 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_get_user_info_response.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_get_user_info_response.py @@ -86,6 +86,7 @@ def openapi_types(): 'groups': ([str],), # noqa: E501 'issuer': (str,), # noqa: E501 'service_account_name': (str,), # noqa: E501 + 'service_account_namespace': (str,), # noqa: E501 'subject': (str,), # noqa: E501 } @@ -100,6 +101,7 @@ def discriminator(): 'groups': 'groups', # noqa: E501 'issuer': 'issuer', # noqa: E501 'service_account_name': 'serviceAccountName', # noqa: E501 + 'service_account_namespace': 'serviceAccountNamespace', # noqa: E501 'subject': 'subject', # noqa: E501 } @@ -149,6 +151,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 groups ([str]): [optional] # noqa: E501 issuer (str): [optional] # noqa: E501 service_account_name (str): [optional] # noqa: E501 + service_account_namespace (str): [optional] # noqa: E501 subject (str): [optional] # noqa: E501 """ @@ -236,6 +239,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 groups ([str]): [optional] # noqa: E501 issuer (str): [optional] # noqa: E501 service_account_name (str): [optional] # noqa: E501 + service_account_namespace (str): [optional] # noqa: E501 subject (str): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_git_artifact.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_git_artifact.py index 2a357f89cbea..782d3a16710b 100644 --- 
a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_git_artifact.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_git_artifact.py @@ -88,12 +88,14 @@ def openapi_types(): lazy_import() return { 'repo': (str,), # noqa: E501 + 'branch': (str,), # noqa: E501 'depth': (int,), # noqa: E501 'disable_submodules': (bool,), # noqa: E501 'fetch': ([str],), # noqa: E501 'insecure_ignore_host_key': (bool,), # noqa: E501 'password_secret': (SecretKeySelector,), # noqa: E501 'revision': (str,), # noqa: E501 + 'single_branch': (bool,), # noqa: E501 'ssh_private_key_secret': (SecretKeySelector,), # noqa: E501 'username_secret': (SecretKeySelector,), # noqa: E501 } @@ -105,12 +107,14 @@ def discriminator(): attribute_map = { 'repo': 'repo', # noqa: E501 + 'branch': 'branch', # noqa: E501 'depth': 'depth', # noqa: E501 'disable_submodules': 'disableSubmodules', # noqa: E501 'fetch': 'fetch', # noqa: E501 'insecure_ignore_host_key': 'insecureIgnoreHostKey', # noqa: E501 'password_secret': 'passwordSecret', # noqa: E501 'revision': 'revision', # noqa: E501 + 'single_branch': 'singleBranch', # noqa: E501 'ssh_private_key_secret': 'sshPrivateKeySecret', # noqa: E501 'username_secret': 'usernameSecret', # noqa: E501 } @@ -159,12 +163,14 @@ def _from_openapi_data(cls, repo, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) + branch (str): Branch is the branch to fetch when `SingleBranch` is enabled. [optional] # noqa: E501 depth (int): Depth specifies clones/fetches should be shallow and include the given number of commits from the branch tip. [optional] # noqa: E501 disable_submodules (bool): DisableSubmodules disables submodules during git clone. [optional] # noqa: E501 fetch ([str]): Fetch specifies a number of refs that should be fetched before checkout. 
[optional] # noqa: E501 insecure_ignore_host_key (bool): InsecureIgnoreHostKey disables SSH strict host key checking during git clone. [optional] # noqa: E501 password_secret (SecretKeySelector): [optional] # noqa: E501 revision (str): Revision is the git commit, tag, branch to checkout. [optional] # noqa: E501 + single_branch (bool): SingleBranch enables single branch clone, using the `branch` parameter. [optional] # noqa: E501 ssh_private_key_secret (SecretKeySelector): [optional] # noqa: E501 username_secret (SecretKeySelector): [optional] # noqa: E501 """ @@ -252,12 +258,14 @@ def __init__(self, repo, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) + branch (str): Branch is the branch to fetch when `SingleBranch` is enabled. [optional] # noqa: E501 depth (int): Depth specifies clones/fetches should be shallow and include the given number of commits from the branch tip. [optional] # noqa: E501 disable_submodules (bool): DisableSubmodules disables submodules during git clone. [optional] # noqa: E501 fetch ([str]): Fetch specifies a number of refs that should be fetched before checkout. [optional] # noqa: E501 insecure_ignore_host_key (bool): InsecureIgnoreHostKey disables SSH strict host key checking during git clone. [optional] # noqa: E501 password_secret (SecretKeySelector): [optional] # noqa: E501 revision (str): Revision is the git commit, tag, branch to checkout. [optional] # noqa: E501 + single_branch (bool): SingleBranch enables single branch clone, using the `branch` parameter. 
[optional] # noqa: E501 ssh_private_key_secret (SecretKeySelector): [optional] # noqa: E501 username_secret (SecretKeySelector): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http.py index 325494bbc132..68290fca3342 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http.py @@ -30,7 +30,9 @@ def lazy_import(): + from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_body_source import IoArgoprojWorkflowV1alpha1HTTPBodySource from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_header import IoArgoprojWorkflowV1alpha1HTTPHeader + globals()['IoArgoprojWorkflowV1alpha1HTTPBodySource'] = IoArgoprojWorkflowV1alpha1HTTPBodySource globals()['IoArgoprojWorkflowV1alpha1HTTPHeader'] = IoArgoprojWorkflowV1alpha1HTTPHeader @@ -89,6 +91,7 @@ def openapi_types(): return { 'url': (str,), # noqa: E501 'body': (str,), # noqa: E501 + 'body_from': (IoArgoprojWorkflowV1alpha1HTTPBodySource,), # noqa: E501 'headers': ([IoArgoprojWorkflowV1alpha1HTTPHeader],), # noqa: E501 'insecure_skip_verify': (bool,), # noqa: E501 'method': (str,), # noqa: E501 @@ -104,6 +107,7 @@ def discriminator(): attribute_map = { 'url': 'url', # noqa: E501 'body': 'body', # noqa: E501 + 'body_from': 'bodyFrom', # noqa: E501 'headers': 'headers', # noqa: E501 'insecure_skip_verify': 'insecureSkipVerify', # noqa: E501 'method': 'method', # noqa: E501 @@ -156,8 +160,9 @@ def _from_openapi_data(cls, url, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) body (str): Body is content of the HTTP Request. 
[optional] # noqa: E501 + body_from (IoArgoprojWorkflowV1alpha1HTTPBodySource): [optional] # noqa: E501 headers ([IoArgoprojWorkflowV1alpha1HTTPHeader]): Headers are an optional list of headers to send with HTTP requests. [optional] # noqa: E501 - insecure_skip_verify (bool): insecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client. [optional] # noqa: E501 + insecure_skip_verify (bool): InsecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client. [optional] # noqa: E501 method (str): Method is HTTP methods for HTTP Request. [optional] # noqa: E501 success_condition (str): SuccessCondition is an expression if evaluated to true is considered successful. [optional] # noqa: E501 timeout_seconds (int): TimeoutSeconds is request timeout for HTTP Request. Default is 30 seconds. [optional] # noqa: E501 @@ -247,8 +252,9 @@ def __init__(self, url, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) body (str): Body is content of the HTTP Request. [optional] # noqa: E501 + body_from (IoArgoprojWorkflowV1alpha1HTTPBodySource): [optional] # noqa: E501 headers ([IoArgoprojWorkflowV1alpha1HTTPHeader]): Headers are an optional list of headers to send with HTTP requests. [optional] # noqa: E501 - insecure_skip_verify (bool): insecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client. [optional] # noqa: E501 + insecure_skip_verify (bool): InsecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client. [optional] # noqa: E501 method (str): Method is HTTP methods for HTTP Request. [optional] # noqa: E501 success_condition (str): SuccessCondition is an expression if evaluated to true is considered successful. [optional] # noqa: E501 timeout_seconds (int): TimeoutSeconds is request timeout for HTTP Request. Default is 30 seconds. 
[optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_artifact.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_artifact.py index 7b89ce06dcc9..de7abcb14d7c 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_artifact.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_artifact.py @@ -31,6 +31,8 @@ def lazy_import(): from argo_workflows.model.io_argoproj_workflow_v1alpha1_header import IoArgoprojWorkflowV1alpha1Header + from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_auth import IoArgoprojWorkflowV1alpha1HTTPAuth + globals()['IoArgoprojWorkflowV1alpha1HTTPAuth'] = IoArgoprojWorkflowV1alpha1HTTPAuth globals()['IoArgoprojWorkflowV1alpha1Header'] = IoArgoprojWorkflowV1alpha1Header @@ -88,6 +90,7 @@ def openapi_types(): lazy_import() return { 'url': (str,), # noqa: E501 + 'auth': (IoArgoprojWorkflowV1alpha1HTTPAuth,), # noqa: E501 'headers': ([IoArgoprojWorkflowV1alpha1Header],), # noqa: E501 } @@ -98,6 +101,7 @@ def discriminator(): attribute_map = { 'url': 'url', # noqa: E501 + 'auth': 'auth', # noqa: E501 'headers': 'headers', # noqa: E501 } @@ -145,6 +149,7 @@ def _from_openapi_data(cls, url, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) + auth (IoArgoprojWorkflowV1alpha1HTTPAuth): [optional] # noqa: E501 headers ([IoArgoprojWorkflowV1alpha1Header]): Headers are an optional list of headers to send with HTTP requests for artifacts. 
[optional] # noqa: E501 """ @@ -231,6 +236,7 @@ def __init__(self, url, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) + auth (IoArgoprojWorkflowV1alpha1HTTPAuth): [optional] # noqa: E501 headers ([IoArgoprojWorkflowV1alpha1Header]): Headers are an optional list of headers to send with HTTP requests for artifacts. [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_database.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_auth.py similarity index 86% rename from sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_database.py rename to sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_auth.py index c7b2a0dc934b..c266d5df5452 100644 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_database.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_auth.py @@ -30,11 +30,15 @@ def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_data_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource + from argo_workflows.model.io_argoproj_workflow_v1alpha1_basic_auth import IoArgoprojWorkflowV1alpha1BasicAuth + from argo_workflows.model.io_argoproj_workflow_v1alpha1_client_cert_auth import IoArgoprojWorkflowV1alpha1ClientCertAuth + from argo_workflows.model.io_argoproj_workflow_v1alpha1_o_auth2_auth import IoArgoprojWorkflowV1alpha1OAuth2Auth + globals()['IoArgoprojWorkflowV1alpha1BasicAuth'] = IoArgoprojWorkflowV1alpha1BasicAuth + globals()['IoArgoprojWorkflowV1alpha1ClientCertAuth'] = IoArgoprojWorkflowV1alpha1ClientCertAuth + 
globals()['IoArgoprojWorkflowV1alpha1OAuth2Auth'] = IoArgoprojWorkflowV1alpha1OAuth2Auth -class GithubComArgoprojLabsArgoDataflowApiV1alpha1Database(ModelNormal): +class IoArgoprojWorkflowV1alpha1HTTPAuth(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -87,8 +91,9 @@ def openapi_types(): """ lazy_import() return { - 'data_source': (GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource,), # noqa: E501 - 'driver': (str,), # noqa: E501 + 'basic_auth': (IoArgoprojWorkflowV1alpha1BasicAuth,), # noqa: E501 + 'client_cert': (IoArgoprojWorkflowV1alpha1ClientCertAuth,), # noqa: E501 + 'oauth2': (IoArgoprojWorkflowV1alpha1OAuth2Auth,), # noqa: E501 } @cached_property @@ -97,8 +102,9 @@ def discriminator(): attribute_map = { - 'data_source': 'dataSource', # noqa: E501 - 'driver': 'driver', # noqa: E501 + 'basic_auth': 'basicAuth', # noqa: E501 + 'client_cert': 'clientCert', # noqa: E501 + 'oauth2': 'oauth2', # noqa: E501 } read_only_vars = { @@ -109,7 +115,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Database - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1HTTPAuth - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -142,8 +148,9 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - data_source (GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource): [optional] # noqa: E501 - driver (str): [optional] # noqa: E501 + basic_auth (IoArgoprojWorkflowV1alpha1BasicAuth): [optional] # noqa: E501 + client_cert (IoArgoprojWorkflowV1alpha1ClientCertAuth): [optional] # noqa: E501 + oauth2 (IoArgoprojWorkflowV1alpha1OAuth2Auth): [optional] # noqa: E501 """ _check_type = 
kwargs.pop('_check_type', True) @@ -192,7 +199,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1Database - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1HTTPAuth - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -225,8 +232,9 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - data_source (GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource): [optional] # noqa: E501 - driver (str): [optional] # noqa: E501 + basic_auth (IoArgoprojWorkflowV1alpha1BasicAuth): [optional] # noqa: E501 + client_cert (IoArgoprojWorkflowV1alpha1ClientCertAuth): [optional] # noqa: E501 + oauth2 (IoArgoprojWorkflowV1alpha1OAuth2Auth): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_aws_endpoint.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_body_source.py similarity index 94% rename from sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_aws_endpoint.py rename to sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_body_source.py index 9d1e1859169f..c52dc44debe3 100644 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_aws_endpoint.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_body_source.py @@ -30,7 +30,7 @@ -class GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint(ModelNormal): +class IoArgoprojWorkflowV1alpha1HTTPBodySource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -58,6 +58,11 @@ class GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint(ModelNormal): } validations = { + ('bytes',): { + 'regex': { + 'pattern': r'^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$', # noqa: E501 + }, + }, } @cached_property @@ -81,7 +86,7 @@ def openapi_types(): and the value is attribute type. """ return { - 'url': (str,), # noqa: E501 + 'bytes': (str,), # noqa: E501 } @cached_property @@ -90,7 +95,7 @@ def discriminator(): attribute_map = { - 'url': 'url', # noqa: E501 + 'bytes': 'bytes', # noqa: E501 } read_only_vars = { @@ -101,7 +106,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1HTTPBodySource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -134,7 +139,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - url (str): [optional] # noqa: E501 + bytes (str): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -183,7 +188,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint - a model defined in OpenAPI + """IoArgoprojWorkflowV1alpha1HTTPBodySource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -216,7 +221,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - url (str): [optional] # noqa: E501 + bytes (str): 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_lifecycle_hook.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_lifecycle_hook.py index 770a33a644ab..0f22b6f4644a 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_lifecycle_hook.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_lifecycle_hook.py @@ -89,9 +89,9 @@ def openapi_types(): """ lazy_import() return { - 'template': (str,), # noqa: E501 'arguments': (IoArgoprojWorkflowV1alpha1Arguments,), # noqa: E501 'expression': (str,), # noqa: E501 + 'template': (str,), # noqa: E501 'template_ref': (IoArgoprojWorkflowV1alpha1TemplateRef,), # noqa: E501 } @@ -101,9 +101,9 @@ def discriminator(): attribute_map = { - 'template': 'template', # noqa: E501 'arguments': 'arguments', # noqa: E501 'expression': 'expression', # noqa: E501 + 'template': 'template', # noqa: E501 'template_ref': 'templateRef', # noqa: E501 } @@ -114,12 +114,9 @@ def discriminator(): @classmethod @convert_js_args_to_python_args - def _from_openapi_data(cls, template, *args, **kwargs): # noqa: E501 + def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 """IoArgoprojWorkflowV1alpha1LifecycleHook - a model defined in OpenAPI - Args: - template (str): Template is the name of the template to execute by the hook - Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be @@ -153,6 +150,7 @@ def _from_openapi_data(cls, template, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) arguments (IoArgoprojWorkflowV1alpha1Arguments): [optional] # noqa: E501 expression (str): Expression is a condition expression for when a node will be retried. If it evaluates to false, the node will not be retried and the retry strategy will be ignored. 
[optional] # noqa: E501 + template (str): Template is the name of the template to execute by the hook. [optional] # noqa: E501 template_ref (IoArgoprojWorkflowV1alpha1TemplateRef): [optional] # noqa: E501 """ @@ -181,7 +179,6 @@ def _from_openapi_data(cls, template, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - self.template = template for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ @@ -202,12 +199,9 @@ def _from_openapi_data(cls, template, *args, **kwargs): # noqa: E501 ]) @convert_js_args_to_python_args - def __init__(self, template, *args, **kwargs): # noqa: E501 + def __init__(self, *args, **kwargs): # noqa: E501 """IoArgoprojWorkflowV1alpha1LifecycleHook - a model defined in OpenAPI - Args: - template (str): Template is the name of the template to execute by the hook - Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be @@ -241,6 +235,7 @@ def __init__(self, template, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) arguments (IoArgoprojWorkflowV1alpha1Arguments): [optional] # noqa: E501 expression (str): Expression is a condition expression for when a node will be retried. If it evaluates to false, the node will not be retried and the retry strategy will be ignored. [optional] # noqa: E501 + template (str): Template is the name of the template to execute by the hook. 
[optional] # noqa: E501 template_ref (IoArgoprojWorkflowV1alpha1TemplateRef): [optional] # noqa: E501 """ @@ -267,7 +262,6 @@ def __init__(self, template, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - self.template = template for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_abstract_step.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_manifest_from.py similarity index 92% rename from sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_abstract_step.py rename to sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_manifest_from.py index c055924d71b3..612aa3d3be7e 100644 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_abstract_step.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_manifest_from.py @@ -30,11 +30,11 @@ def lazy_import(): - from argo_workflows.model.resource_requirements import ResourceRequirements - globals()['ResourceRequirements'] = ResourceRequirements + from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact import IoArgoprojWorkflowV1alpha1Artifact + globals()['IoArgoprojWorkflowV1alpha1Artifact'] = IoArgoprojWorkflowV1alpha1Artifact -class GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep(ModelNormal): +class IoArgoprojWorkflowV1alpha1ManifestFrom(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -87,7 +87,7 @@ def openapi_types(): """ lazy_import() return { - 'resources': (ResourceRequirements,), # noqa: E501 + 'artifact': (IoArgoprojWorkflowV1alpha1Artifact,), # noqa: E501 } @cached_property @@ -96,7 +96,7 @@ def discriminator(): attribute_map = { - 'resources': 'resources', # noqa: E501 + 'artifact': 'artifact', # noqa: E501 } read_only_vars = { @@ -106,8 +106,11 @@ def discriminator(): @classmethod @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep - a model defined in OpenAPI + def _from_openapi_data(cls, artifact, *args, **kwargs): # noqa: E501 + """IoArgoprojWorkflowV1alpha1ManifestFrom - a model defined in OpenAPI + + Args: + artifact (IoArgoprojWorkflowV1alpha1Artifact): Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -140,7 +143,6 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - resources (ResourceRequirements): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -168,6 +170,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.artifact = artifact for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ @@ -188,8 +191,11 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 ]) @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep - a model defined in OpenAPI + def __init__(self, artifact, *args, **kwargs): # noqa: E501 + """IoArgoprojWorkflowV1alpha1ManifestFrom - a model defined in OpenAPI + + Args: + artifact 
(IoArgoprojWorkflowV1alpha1Artifact): Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -222,7 +228,6 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - resources (ResourceRequirements): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -248,6 +253,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.artifact = artifact for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_o_auth2_auth.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_o_auth2_auth.py new file mode 100644 index 000000000000..f9d4a61589fe --- /dev/null +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_o_auth2_auth.py @@ -0,0 +1,279 @@ +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 + + The version of the OpenAPI document: VERSION + Generated by: https://openapi-generator.tech +""" + + +import re # noqa: F401 +import sys # noqa: F401 + +from argo_workflows.model_utils import ( # noqa: F401 + ApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from ..model_utils import OpenApiModel +from argo_workflows.exceptions import ApiAttributeError + + +def lazy_import(): + from argo_workflows.model.io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param import IoArgoprojWorkflowV1alpha1OAuth2EndpointParam + from argo_workflows.model.secret_key_selector import SecretKeySelector + globals()['IoArgoprojWorkflowV1alpha1OAuth2EndpointParam'] = IoArgoprojWorkflowV1alpha1OAuth2EndpointParam + globals()['SecretKeySelector'] = SecretKeySelector + + +class IoArgoprojWorkflowV1alpha1OAuth2Auth(ModelNormal): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. 
+ additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + allowed_values = { + } + + validations = { + } + + @cached_property + def additional_properties_type(): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + + _nullable = False + + @cached_property + def openapi_types(): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return { + 'client_id_secret': (SecretKeySelector,), # noqa: E501 + 'client_secret_secret': (SecretKeySelector,), # noqa: E501 + 'endpoint_params': ([IoArgoprojWorkflowV1alpha1OAuth2EndpointParam],), # noqa: E501 + 'scopes': ([str],), # noqa: E501 + 'token_url_secret': (SecretKeySelector,), # noqa: E501 + } + + @cached_property + def discriminator(): + return None + + + attribute_map = { + 'client_id_secret': 'clientIDSecret', # noqa: E501 + 'client_secret_secret': 'clientSecretSecret', # noqa: E501 + 'endpoint_params': 'endpointParams', # noqa: E501 + 'scopes': 'scopes', # noqa: E501 + 'token_url_secret': 'tokenURLSecret', # noqa: E501 + } + + read_only_vars = { + } + + _composed_schemas = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 + """IoArgoprojWorkflowV1alpha1OAuth2Auth - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. 
+ Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + client_id_secret (SecretKeySelector): [optional] # noqa: E501 + client_secret_secret (SecretKeySelector): [optional] # noqa: E501 + endpoint_params ([IoArgoprojWorkflowV1alpha1OAuth2EndpointParam]): [optional] # noqa: E501 + scopes ([str]): [optional] # noqa: E501 + token_url_secret (SecretKeySelector): [optional] # noqa: E501 + """ + + _check_type = kwargs.pop('_check_type', True) + _spec_property_naming = kwargs.pop('_spec_property_naming', False) + _path_to_item = kwargs.pop('_path_to_item', ()) + _configuration = kwargs.pop('_configuration', None) + _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise ApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + args, + self.__class__.__name__, + ), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if var_name not in self.attribute_map and \ + self._configuration is not None and \ + self._configuration.discard_unknown_keys and \ + self.additional_properties_type is None: + # discard variable. 
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set([ + '_data_store', + '_check_type', + '_spec_property_naming', + '_path_to_item', + '_configuration', + '_visited_composed_classes', + ]) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs): # noqa: E501 + """IoArgoprojWorkflowV1alpha1OAuth2Auth - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + client_id_secret (SecretKeySelector): [optional] # noqa: E501 + client_secret_secret (SecretKeySelector): [optional] # noqa: E501 + endpoint_params ([IoArgoprojWorkflowV1alpha1OAuth2EndpointParam]): [optional] # noqa: E501 + scopes ([str]): [optional] # noqa: E501 + token_url_secret (SecretKeySelector): [optional] # noqa: E501 + """ + + _check_type = kwargs.pop('_check_type', True) + _spec_property_naming = kwargs.pop('_spec_property_naming', False) + _path_to_item = kwargs.pop('_path_to_item', ()) + _configuration = kwargs.pop('_configuration', None) + _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + + if args: + raise ApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + args, + self.__class__.__name__, + ), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if var_name not in self.attribute_map and \ + self._configuration is not None and \ + self._configuration.discard_unknown_keys and \ + self.additional_properties_type is None: + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " + f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_nats_auth.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param.py similarity index 92% rename from sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_nats_auth.py rename to sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param.py index e97d2b086bad..e5fbddaf2344 100644 --- a/sdks/python/client/argo_workflows/model/github_com_argoproj_labs_argo_dataflow_api_v1alpha1_nats_auth.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param.py @@ -29,12 +29,8 @@ from argo_workflows.exceptions import ApiAttributeError -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - -class GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth(ModelNormal): +class IoArgoprojWorkflowV1alpha1OAuth2EndpointParam(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -70,7 +66,6 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - lazy_import() return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 _nullable = False @@ -85,9 +80,9 @@ def openapi_types(): openapi_types (dict): The key is attribute name and the value is attribute type. 
""" - lazy_import() return { - 'token': (SecretKeySelector,), # noqa: E501 + 'key': (str,), # noqa: E501 + 'value': (str,), # noqa: E501 } @cached_property @@ -96,7 +91,8 @@ def discriminator(): attribute_map = { - 'token': 'token', # noqa: E501 + 'key': 'key', # noqa: E501 + 'value': 'value', # noqa: E501 } read_only_vars = { @@ -106,8 +102,11 @@ def discriminator(): @classmethod @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth - a model defined in OpenAPI + def _from_openapi_data(cls, key, *args, **kwargs): # noqa: E501 + """IoArgoprojWorkflowV1alpha1OAuth2EndpointParam - a model defined in OpenAPI + + Args: + key (str): Name is the header name Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -140,7 +139,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - token (SecretKeySelector): [optional] # noqa: E501 + value (str): Value is the literal value to use for the header. 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -168,6 +167,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.key = key for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ @@ -188,8 +188,11 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 ]) @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth - a model defined in OpenAPI + def __init__(self, key, *args, **kwargs): # noqa: E501 + """IoArgoprojWorkflowV1alpha1OAuth2EndpointParam - a model defined in OpenAPI + + Args: + key (str): Name is the header name Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -222,7 +225,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - token (SecretKeySelector): [optional] # noqa: E501 + value (str): Value is the literal value to use for the header. 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -248,6 +251,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.key = key for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_resource_template.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_resource_template.py index b1d1e1c30540..1fd960c88fd6 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_resource_template.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_resource_template.py @@ -29,6 +29,10 @@ from argo_workflows.exceptions import ApiAttributeError +def lazy_import(): + from argo_workflows.model.io_argoproj_workflow_v1alpha1_manifest_from import IoArgoprojWorkflowV1alpha1ManifestFrom + globals()['IoArgoprojWorkflowV1alpha1ManifestFrom'] = IoArgoprojWorkflowV1alpha1ManifestFrom + class IoArgoprojWorkflowV1alpha1ResourceTemplate(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. @@ -66,6 +70,7 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ + lazy_import() return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 _nullable = False @@ -80,11 +85,13 @@ def openapi_types(): openapi_types (dict): The key is attribute name and the value is attribute type. 
""" + lazy_import() return { 'action': (str,), # noqa: E501 'failure_condition': (str,), # noqa: E501 'flags': ([str],), # noqa: E501 'manifest': (str,), # noqa: E501 + 'manifest_from': (IoArgoprojWorkflowV1alpha1ManifestFrom,), # noqa: E501 'merge_strategy': (str,), # noqa: E501 'set_owner_reference': (bool,), # noqa: E501 'success_condition': (str,), # noqa: E501 @@ -100,6 +107,7 @@ def discriminator(): 'failure_condition': 'failureCondition', # noqa: E501 'flags': 'flags', # noqa: E501 'manifest': 'manifest', # noqa: E501 + 'manifest_from': 'manifestFrom', # noqa: E501 'merge_strategy': 'mergeStrategy', # noqa: E501 'set_owner_reference': 'setOwnerReference', # noqa: E501 'success_condition': 'successCondition', # noqa: E501 @@ -152,6 +160,7 @@ def _from_openapi_data(cls, action, *args, **kwargs): # noqa: E501 failure_condition (str): FailureCondition is a label selector expression which describes the conditions of the k8s resource in which the step was considered failed. [optional] # noqa: E501 flags ([str]): Flags is a set of additional options passed to kubectl before submitting a resource I.e. to disable resource validation: flags: [ \"--validate=false\" # disable resource validation ]. [optional] # noqa: E501 manifest (str): Manifest contains the kubernetes manifest. [optional] # noqa: E501 + manifest_from (IoArgoprojWorkflowV1alpha1ManifestFrom): [optional] # noqa: E501 merge_strategy (str): MergeStrategy is the strategy used to merge a patch. It defaults to \"strategic\" Must be one of: strategic, merge, json. [optional] # noqa: E501 set_owner_reference (bool): SetOwnerReference sets the reference to the workflow on the OwnerReference of generated resource.. [optional] # noqa: E501 success_condition (str): SuccessCondition is a label selector expression which describes the conditions of the k8s resource in which it is acceptable to proceed to the following step. 
[optional] # noqa: E501 @@ -243,6 +252,7 @@ def __init__(self, action, *args, **kwargs): # noqa: E501 failure_condition (str): FailureCondition is a label selector expression which describes the conditions of the k8s resource in which the step was considered failed. [optional] # noqa: E501 flags ([str]): Flags is a set of additional options passed to kubectl before submitting a resource I.e. to disable resource validation: flags: [ \"--validate=false\" # disable resource validation ]. [optional] # noqa: E501 manifest (str): Manifest contains the kubernetes manifest. [optional] # noqa: E501 + manifest_from (IoArgoprojWorkflowV1alpha1ManifestFrom): [optional] # noqa: E501 merge_strategy (str): MergeStrategy is the strategy used to merge a patch. It defaults to \"strategic\" Must be one of: strategic, merge, json. [optional] # noqa: E501 set_owner_reference (bool): SetOwnerReference sets the reference to the workflow on the OwnerReference of generated resource.. [optional] # noqa: E501 success_condition (str): SuccessCondition is a label selector expression which describes the conditions of the k8s resource in which it is acceptable to proceed to the following step. 
[optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request.py index f4fc3428ee7a..d478a417771b 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request.py @@ -84,6 +84,7 @@ def openapi_types(): 'memoized': (bool,), # noqa: E501 'name': (str,), # noqa: E501 'namespace': (str,), # noqa: E501 + 'parameters': ([str],), # noqa: E501 'uid': (str,), # noqa: E501 } @@ -96,6 +97,7 @@ def discriminator(): 'memoized': 'memoized', # noqa: E501 'name': 'name', # noqa: E501 'namespace': 'namespace', # noqa: E501 + 'parameters': 'parameters', # noqa: E501 'uid': 'uid', # noqa: E501 } @@ -143,6 +145,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 memoized (bool): [optional] # noqa: E501 name (str): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 + parameters ([str]): [optional] # noqa: E501 uid (str): [optional] # noqa: E501 """ @@ -228,6 +231,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 memoized (bool): [optional] # noqa: E501 name (str): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 + parameters ([str]): [optional] # noqa: E501 uid (str): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_retry_archived_workflow_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_retry_archived_workflow_request.py index cdb9740e3312..56c9e6ed4464 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_retry_archived_workflow_request.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_retry_archived_workflow_request.py @@ -84,6 
+84,7 @@ def openapi_types(): 'name': (str,), # noqa: E501 'namespace': (str,), # noqa: E501 'node_field_selector': (str,), # noqa: E501 + 'parameters': ([str],), # noqa: E501 'restart_successful': (bool,), # noqa: E501 'uid': (str,), # noqa: E501 } @@ -97,6 +98,7 @@ def discriminator(): 'name': 'name', # noqa: E501 'namespace': 'namespace', # noqa: E501 'node_field_selector': 'nodeFieldSelector', # noqa: E501 + 'parameters': 'parameters', # noqa: E501 'restart_successful': 'restartSuccessful', # noqa: E501 'uid': 'uid', # noqa: E501 } @@ -145,6 +147,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 name (str): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 node_field_selector (str): [optional] # noqa: E501 + parameters ([str]): [optional] # noqa: E501 restart_successful (bool): [optional] # noqa: E501 uid (str): [optional] # noqa: E501 """ @@ -231,6 +234,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 name (str): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 node_field_selector (str): [optional] # noqa: E501 + parameters ([str]): [optional] # noqa: E501 restart_successful (bool): [optional] # noqa: E501 uid (str): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_script_template.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_script_template.py index 0d833a59f00a..f1628012da63 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_script_template.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_script_template.py @@ -170,7 +170,7 @@ def _from_openapi_data(cls, image, source, *args, **kwargs): # noqa: E501 """IoArgoprojWorkflowV1alpha1ScriptTemplate - a model defined in OpenAPI Args: - image (str): Docker image name. 
More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. + image (str): Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. source (str): Source contains the source code of the script to execute Keyword Args: @@ -204,8 +204,8 @@ def _from_openapi_data(cls, image, source, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - args ([str]): Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - command ([str]): Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". 
Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 + args ([str]): Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 + command ([str]): Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 env ([EnvVar]): List of environment variables to set in the container. Cannot be updated.. [optional] # noqa: E501 env_from ([EnvFromSource]): List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. 
All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.. [optional] # noqa: E501 image_pull_policy (str): Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images. [optional] # noqa: E501 @@ -278,7 +278,7 @@ def __init__(self, image, source, *args, **kwargs): # noqa: E501 """IoArgoprojWorkflowV1alpha1ScriptTemplate - a model defined in OpenAPI Args: - image (str): Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. + image (str): Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. source (str): Source contains the source code of the script to execute Keyword Args: @@ -312,8 +312,8 @@ def __init__(self, image, source, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - args ([str]): Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". 
Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - command ([str]): Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 + args ([str]): Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 + command ([str]): Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. 
Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 env ([EnvVar]): List of environment variables to set in the container. Cannot be updated.. [optional] # noqa: E501 env_from ([EnvFromSource]): List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.. [optional] # noqa: E501 image_pull_policy (str): Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images. [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_user_container.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_user_container.py index 695082a06b5e..47c24e9287fe 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_user_container.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_user_container.py @@ -203,11 +203,11 @@ def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - args ([str]): Arguments to the entrypoint. 
The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - command ([str]): Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 + args ([str]): Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 + command ([str]): Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 env ([EnvVar]): List of environment variables to set in the container. Cannot be updated.. [optional] # noqa: E501 env_from ([EnvFromSource]): List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.. [optional] # noqa: E501 - image (str): Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.. [optional] # noqa: E501 + image (str): Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.. 
[optional] # noqa: E501 image_pull_policy (str): Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images. [optional] # noqa: E501 lifecycle (Lifecycle): [optional] # noqa: E501 liveness_probe (Probe): [optional] # noqa: E501 @@ -310,11 +310,11 @@ def __init__(self, name, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - args ([str]): Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - command ([str]): Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. 
[optional] # noqa: E501 + args ([str]): Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 + command ([str]): Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 env ([EnvVar]): List of environment variables to set in the container. Cannot be updated.. [optional] # noqa: E501 env_from ([EnvFromSource]): List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.. 
[optional] # noqa: E501 - image (str): Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.. [optional] # noqa: E501 + image (str): Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.. [optional] # noqa: E501 image_pull_policy (str): Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images. [optional] # noqa: E501 lifecycle (Lifecycle): [optional] # noqa: E501 liveness_probe (Probe): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_resubmit_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_resubmit_request.py index d89a95b01186..5d31e730e5e3 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_resubmit_request.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_resubmit_request.py @@ -84,6 +84,7 @@ def openapi_types(): 'memoized': (bool,), # noqa: E501 'name': (str,), # noqa: E501 'namespace': (str,), # noqa: E501 + 'parameters': ([str],), # noqa: E501 } @cached_property @@ -95,6 +96,7 @@ def discriminator(): 'memoized': 'memoized', # noqa: E501 'name': 'name', # noqa: E501 'namespace': 'namespace', # noqa: E501 + 'parameters': 'parameters', # noqa: E501 } read_only_vars = { @@ -141,6 +143,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 memoized (bool): [optional] # noqa: E501 name (str): [optional] # noqa: E501 
namespace (str): [optional] # noqa: E501 + parameters ([str]): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -225,6 +228,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 memoized (bool): [optional] # noqa: E501 name (str): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 + parameters ([str]): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_retry_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_retry_request.py index c4398d6f6149..9ffccfe91d2b 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_retry_request.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_retry_request.py @@ -84,6 +84,7 @@ def openapi_types(): 'name': (str,), # noqa: E501 'namespace': (str,), # noqa: E501 'node_field_selector': (str,), # noqa: E501 + 'parameters': ([str],), # noqa: E501 'restart_successful': (bool,), # noqa: E501 } @@ -96,6 +97,7 @@ def discriminator(): 'name': 'name', # noqa: E501 'namespace': 'namespace', # noqa: E501 'node_field_selector': 'nodeFieldSelector', # noqa: E501 + 'parameters': 'parameters', # noqa: E501 'restart_successful': 'restartSuccessful', # noqa: E501 } @@ -143,6 +145,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 name (str): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 node_field_selector (str): [optional] # noqa: E501 + parameters ([str]): [optional] # noqa: E501 restart_successful (bool): [optional] # noqa: E501 """ @@ -228,6 +231,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 name (str): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 node_field_selector (str): [optional] # noqa: E501 + parameters ([str]): [optional] # noqa: E501 restart_successful (bool): [optional] # noqa: E501 """ diff --git 
a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_spec.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_spec.py index aeff5868e775..601c55384381 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_spec.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_spec.py @@ -33,6 +33,7 @@ def lazy_import(): from argo_workflows.model.affinity import Affinity from argo_workflows.model.host_alias import HostAlias from argo_workflows.model.io_argoproj_workflow_v1alpha1_arguments import IoArgoprojWorkflowV1alpha1Arguments + from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_gc import IoArgoprojWorkflowV1alpha1ArtifactGC from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_repository_ref import IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef from argo_workflows.model.io_argoproj_workflow_v1alpha1_executor_config import IoArgoprojWorkflowV1alpha1ExecutorConfig from argo_workflows.model.io_argoproj_workflow_v1alpha1_lifecycle_hook import IoArgoprojWorkflowV1alpha1LifecycleHook @@ -56,6 +57,7 @@ def lazy_import(): globals()['Affinity'] = Affinity globals()['HostAlias'] = HostAlias globals()['IoArgoprojWorkflowV1alpha1Arguments'] = IoArgoprojWorkflowV1alpha1Arguments + globals()['IoArgoprojWorkflowV1alpha1ArtifactGC'] = IoArgoprojWorkflowV1alpha1ArtifactGC globals()['IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef'] = IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef globals()['IoArgoprojWorkflowV1alpha1ExecutorConfig'] = IoArgoprojWorkflowV1alpha1ExecutorConfig globals()['IoArgoprojWorkflowV1alpha1LifecycleHook'] = IoArgoprojWorkflowV1alpha1LifecycleHook @@ -135,6 +137,7 @@ def openapi_types(): 'affinity': (Affinity,), # noqa: E501 'archive_logs': (bool,), # noqa: E501 'arguments': (IoArgoprojWorkflowV1alpha1Arguments,), # noqa: E501 + 'artifact_gc': (IoArgoprojWorkflowV1alpha1ArtifactGC,), # noqa: E501 
'artifact_repository_ref': (IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef,), # noqa: E501 'automount_service_account_token': (bool,), # noqa: E501 'dns_config': (PodDNSConfig,), # noqa: E501 @@ -184,6 +187,7 @@ def discriminator(): 'affinity': 'affinity', # noqa: E501 'archive_logs': 'archiveLogs', # noqa: E501 'arguments': 'arguments', # noqa: E501 + 'artifact_gc': 'artifactGC', # noqa: E501 'artifact_repository_ref': 'artifactRepositoryRef', # noqa: E501 'automount_service_account_token': 'automountServiceAccountToken', # noqa: E501 'dns_config': 'dnsConfig', # noqa: E501 @@ -268,6 +272,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 affinity (Affinity): [optional] # noqa: E501 archive_logs (bool): ArchiveLogs indicates if the container logs should be archived. [optional] # noqa: E501 arguments (IoArgoprojWorkflowV1alpha1Arguments): [optional] # noqa: E501 + artifact_gc (IoArgoprojWorkflowV1alpha1ArtifactGC): [optional] # noqa: E501 artifact_repository_ref (IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef): [optional] # noqa: E501 automount_service_account_token (bool): AutomountServiceAccountToken indicates whether a service account token should be automatically mounted in pods. ServiceAccountName of ExecutorConfig must be specified if this value is false.. [optional] # noqa: E501 dns_config (PodDNSConfig): [optional] # noqa: E501 @@ -285,7 +290,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 pod_disruption_budget (IoK8sApiPolicyV1beta1PodDisruptionBudgetSpec): [optional] # noqa: E501 pod_gc (IoArgoprojWorkflowV1alpha1PodGC): [optional] # noqa: E501 pod_metadata (IoArgoprojWorkflowV1alpha1Metadata): [optional] # noqa: E501 - pod_priority (int): Priority to apply to workflow pods.. [optional] # noqa: E501 + pod_priority (int): Priority to apply to workflow pods. DEPRECATED: Use PodPriorityClassName instead.. [optional] # noqa: E501 pod_priority_class_name (str): PriorityClassName to apply to workflow pods.. 
[optional] # noqa: E501 pod_spec_patch (str): PodSpecPatch holds strategic merge patch to apply against the pod spec. Allows parameterization of container fields which are not strings (e.g. resource limits).. [optional] # noqa: E501 priority (int): Priority is used if controller is configured to process limited number of workflows in parallel. Workflows with higher priority are processed first.. [optional] # noqa: E501 @@ -390,6 +395,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 affinity (Affinity): [optional] # noqa: E501 archive_logs (bool): ArchiveLogs indicates if the container logs should be archived. [optional] # noqa: E501 arguments (IoArgoprojWorkflowV1alpha1Arguments): [optional] # noqa: E501 + artifact_gc (IoArgoprojWorkflowV1alpha1ArtifactGC): [optional] # noqa: E501 artifact_repository_ref (IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef): [optional] # noqa: E501 automount_service_account_token (bool): AutomountServiceAccountToken indicates whether a service account token should be automatically mounted in pods. ServiceAccountName of ExecutorConfig must be specified if this value is false.. [optional] # noqa: E501 dns_config (PodDNSConfig): [optional] # noqa: E501 @@ -407,7 +413,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 pod_disruption_budget (IoK8sApiPolicyV1beta1PodDisruptionBudgetSpec): [optional] # noqa: E501 pod_gc (IoArgoprojWorkflowV1alpha1PodGC): [optional] # noqa: E501 pod_metadata (IoArgoprojWorkflowV1alpha1Metadata): [optional] # noqa: E501 - pod_priority (int): Priority to apply to workflow pods.. [optional] # noqa: E501 + pod_priority (int): Priority to apply to workflow pods. DEPRECATED: Use PodPriorityClassName instead.. [optional] # noqa: E501 pod_priority_class_name (str): PriorityClassName to apply to workflow pods.. [optional] # noqa: E501 pod_spec_patch (str): PodSpecPatch holds strategic merge patch to apply against the pod spec. Allows parameterization of container fields which are not strings (e.g. 
resource limits).. [optional] # noqa: E501 priority (int): Priority is used if controller is configured to process limited number of workflows in parallel. Workflows with higher priority are processed first.. [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_status.py index 269a0178b570..697f5fca46bc 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_status.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_status.py @@ -30,6 +30,7 @@ def lazy_import(): + from argo_workflows.model.io_argoproj_workflow_v1alpha1_art_gc_status import IoArgoprojWorkflowV1alpha1ArtGCStatus from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_repository_ref_status import IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus from argo_workflows.model.io_argoproj_workflow_v1alpha1_condition import IoArgoprojWorkflowV1alpha1Condition from argo_workflows.model.io_argoproj_workflow_v1alpha1_node_status import IoArgoprojWorkflowV1alpha1NodeStatus @@ -38,6 +39,7 @@ def lazy_import(): from argo_workflows.model.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_spec import IoArgoprojWorkflowV1alpha1WorkflowSpec from argo_workflows.model.volume import Volume + globals()['IoArgoprojWorkflowV1alpha1ArtGCStatus'] = IoArgoprojWorkflowV1alpha1ArtGCStatus globals()['IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus'] = IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus globals()['IoArgoprojWorkflowV1alpha1Condition'] = IoArgoprojWorkflowV1alpha1Condition globals()['IoArgoprojWorkflowV1alpha1NodeStatus'] = IoArgoprojWorkflowV1alpha1NodeStatus @@ -101,6 +103,7 @@ def openapi_types(): """ lazy_import() return { + 'artifact_gc_status': 
(IoArgoprojWorkflowV1alpha1ArtGCStatus,), # noqa: E501 'artifact_repository_ref': (IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus,), # noqa: E501 'compressed_nodes': (str,), # noqa: E501 'conditions': ([IoArgoprojWorkflowV1alpha1Condition],), # noqa: E501 @@ -126,6 +129,7 @@ def discriminator(): attribute_map = { + 'artifact_gc_status': 'artifactGCStatus', # noqa: E501 'artifact_repository_ref': 'artifactRepositoryRef', # noqa: E501 'compressed_nodes': 'compressedNodes', # noqa: E501 'conditions': 'conditions', # noqa: E501 @@ -186,6 +190,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) + artifact_gc_status (IoArgoprojWorkflowV1alpha1ArtGCStatus): [optional] # noqa: E501 artifact_repository_ref (IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus): [optional] # noqa: E501 compressed_nodes (str): Compressed and base64 decoded Nodes map. [optional] # noqa: E501 conditions ([IoArgoprojWorkflowV1alpha1Condition]): Conditions is a list of conditions the Workflow may have. [optional] # noqa: E501 @@ -284,6 +289,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) + artifact_gc_status (IoArgoprojWorkflowV1alpha1ArtGCStatus): [optional] # noqa: E501 artifact_repository_ref (IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus): [optional] # noqa: E501 compressed_nodes (str): Compressed and base64 decoded Nodes map. [optional] # noqa: E501 conditions ([IoArgoprojWorkflowV1alpha1Condition]): Conditions is a list of conditions the Workflow may have. 
[optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/pipeline_log_entry.py b/sdks/python/client/argo_workflows/model/pipeline_log_entry.py deleted file mode 100644 index 5e0bf13fb8fa..000000000000 --- a/sdks/python/client/argo_workflows/model/pipeline_log_entry.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - - -class PipelineLogEntry(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'msg': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'pipeline_name': (str,), # noqa: E501 - 'step_name': (str,), # noqa: E501 - 'time': (datetime,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'msg': 'msg', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'pipeline_name': 'pipelineName', # noqa: E501 - 'step_name': 'stepName', # noqa: E501 - 'time': 'time', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """PipelineLogEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - msg (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - pipeline_name (str): [optional] # noqa: E501 - step_name (str): [optional] # noqa: E501 - time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """PipelineLogEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - msg (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - pipeline_name (str): [optional] # noqa: E501 - step_name (str): [optional] # noqa: E501 - time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/pipeline_pipeline_watch_event.py b/sdks/python/client/argo_workflows/model/pipeline_pipeline_watch_event.py deleted file mode 100644 index 6172ea1242f4..000000000000 --- a/sdks/python/client/argo_workflows/model/pipeline_pipeline_watch_event.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline import GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline - - -class PipelinePipelineWatchEvent(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'object': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline,), # noqa: E501 - 'type': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'object': 'object', # noqa: E501 - 'type': 'type', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """PipelinePipelineWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - object (GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """PipelinePipelineWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - object (GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/pipeline_step_watch_event.py b/sdks/python/client/argo_workflows/model/pipeline_step_watch_event.py deleted file mode 100644 index ecc699b317cf..000000000000 --- a/sdks/python/client/argo_workflows/model/pipeline_step_watch_event.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_step import GithubComArgoprojLabsArgoDataflowApiV1alpha1Step - globals()['GithubComArgoprojLabsArgoDataflowApiV1alpha1Step'] = GithubComArgoprojLabsArgoDataflowApiV1alpha1Step - - -class PipelineStepWatchEvent(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'object': (GithubComArgoprojLabsArgoDataflowApiV1alpha1Step,), # noqa: E501 - 'type': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'object': 'object', # noqa: E501 - 'type': 'type', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """PipelineStepWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - object (GithubComArgoprojLabsArgoDataflowApiV1alpha1Step): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """PipelineStepWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - object (GithubComArgoprojLabsArgoDataflowApiV1alpha1Step): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/stream_result_of_pipeline_log_entry.py b/sdks/python/client/argo_workflows/model/stream_result_of_pipeline_log_entry.py deleted file mode 100644 index baa43b10addf..000000000000 --- a/sdks/python/client/argo_workflows/model/stream_result_of_pipeline_log_entry.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError - from argo_workflows.model.pipeline_log_entry import PipelineLogEntry - globals()['GrpcGatewayRuntimeStreamError'] = GrpcGatewayRuntimeStreamError - globals()['PipelineLogEntry'] = PipelineLogEntry - - -class StreamResultOfPipelineLogEntry(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'error': (GrpcGatewayRuntimeStreamError,), # noqa: E501 - 'result': (PipelineLogEntry,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'error': 'error', # noqa: E501 - 'result': 'result', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """StreamResultOfPipelineLogEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (PipelineLogEntry): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """StreamResultOfPipelineLogEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (PipelineLogEntry): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/stream_result_of_pipeline_pipeline_watch_event.py b/sdks/python/client/argo_workflows/model/stream_result_of_pipeline_pipeline_watch_event.py deleted file mode 100644 index d6b46ef058da..000000000000 --- a/sdks/python/client/argo_workflows/model/stream_result_of_pipeline_pipeline_watch_event.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError - from argo_workflows.model.pipeline_pipeline_watch_event import PipelinePipelineWatchEvent - globals()['GrpcGatewayRuntimeStreamError'] = GrpcGatewayRuntimeStreamError - globals()['PipelinePipelineWatchEvent'] = PipelinePipelineWatchEvent - - -class StreamResultOfPipelinePipelineWatchEvent(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'error': (GrpcGatewayRuntimeStreamError,), # noqa: E501 - 'result': (PipelinePipelineWatchEvent,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'error': 'error', # noqa: E501 - 'result': 'result', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """StreamResultOfPipelinePipelineWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (PipelinePipelineWatchEvent): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """StreamResultOfPipelinePipelineWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (PipelinePipelineWatchEvent): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/models/__init__.py b/sdks/python/client/argo_workflows/models/__init__.py index 9c6d1482919c..61650ad33252 100644 --- a/sdks/python/client/argo_workflows/models/__init__.py +++ b/sdks/python/client/argo_workflows/models/__init__.py @@ -17,7 +17,6 @@ from argo_workflows.model.capabilities import Capabilities from argo_workflows.model.ceph_fs_volume_source import CephFSVolumeSource from argo_workflows.model.cinder_volume_source import CinderVolumeSource -from argo_workflows.model.condition import Condition from argo_workflows.model.config_map_env_source import ConfigMapEnvSource from argo_workflows.model.config_map_key_selector import ConfigMapKeySelector from argo_workflows.model.config_map_projection import ConfigMapProjection @@ -28,7 +27,6 @@ from argo_workflows.model.downward_api_projection import DownwardAPIProjection from argo_workflows.model.downward_api_volume_file import DownwardAPIVolumeFile from argo_workflows.model.downward_api_volume_source import DownwardAPIVolumeSource -from argo_workflows.model.duration import Duration from 
argo_workflows.model.empty_dir_volume_source import EmptyDirVolumeSource from argo_workflows.model.env_from_source import EnvFromSource from argo_workflows.model.env_var import EnvVar @@ -48,65 +46,6 @@ from argo_workflows.model.gce_persistent_disk_volume_source import GCEPersistentDiskVolumeSource from argo_workflows.model.grpc_action import GRPCAction from argo_workflows.model.git_repo_volume_source import GitRepoVolumeSource -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_aws_credentials import GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_aws_endpoint import GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_abstract_step import GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_abstract_volume_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_backoff import GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_cat import GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_code import GithubComArgoprojLabsArgoDataflowApiV1alpha1Code -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_container import GithubComArgoprojLabsArgoDataflowApiV1alpha1Container -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_cron import GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_data_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource -from 
argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_data_source_from import GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_sink import GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_db_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_database import GithubComArgoprojLabsArgoDataflowApiV1alpha1Database -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_dedupe import GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_expand import GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_filter import GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_flatten import GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_git import GithubComArgoprojLabsArgoDataflowApiV1alpha1Git -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_group import GithubComArgoprojLabsArgoDataflowApiV1alpha1Group -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_header import GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_header_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_sink import GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink -from 
argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_http_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_interface import GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_jet_stream import GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_jet_stream_sink import GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_jet_stream_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka import GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_config import GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_net import GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_sink import GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_kafka_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_log import GithubComArgoprojLabsArgoDataflowApiV1alpha1Log -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_map import GithubComArgoprojLabsArgoDataflowApiV1alpha1Map -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_metadata import GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata -from 
argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_nats_auth import GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline import GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline_list import GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline_spec import GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline_status import GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_s3 import GithubComArgoprojLabsArgoDataflowApiV1alpha1S3 -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_s3_sink import GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_s3_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sasl import GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sql_action import GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sql_statement import GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_stan import GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_scale import GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale -from 
argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sidecar import GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_sink import GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1Source -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_step import GithubComArgoprojLabsArgoDataflowApiV1alpha1Step -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_step_spec import GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_step_status import GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_storage import GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_tls import GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_volume_sink import GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_volume_source import GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource from argo_workflows.model.glusterfs_volume_source import GlusterfsVolumeSource from argo_workflows.model.google_protobuf_any import GoogleProtobufAny from argo_workflows.model.group_version_resource import GroupVersionResource @@ -224,21 +163,33 @@ from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext from argo_workflows.model.io_argoproj_workflow_v1alpha1_archive_strategy import IoArgoprojWorkflowV1alpha1ArchiveStrategy from 
argo_workflows.model.io_argoproj_workflow_v1alpha1_arguments import IoArgoprojWorkflowV1alpha1Arguments +from argo_workflows.model.io_argoproj_workflow_v1alpha1_art_gc_status import IoArgoprojWorkflowV1alpha1ArtGCStatus from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact import IoArgoprojWorkflowV1alpha1Artifact +from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_gc import IoArgoprojWorkflowV1alpha1ArtifactGC +from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_gc_spec import IoArgoprojWorkflowV1alpha1ArtifactGCSpec +from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_gc_status import IoArgoprojWorkflowV1alpha1ArtifactGCStatus from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_location import IoArgoprojWorkflowV1alpha1ArtifactLocation +from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_node_spec import IoArgoprojWorkflowV1alpha1ArtifactNodeSpec from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_paths import IoArgoprojWorkflowV1alpha1ArtifactPaths from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_repository import IoArgoprojWorkflowV1alpha1ArtifactRepository from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_repository_ref import IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_repository_ref_status import IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus +from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_result import IoArgoprojWorkflowV1alpha1ArtifactResult +from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_result_node_status import IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifactory_artifact import IoArgoprojWorkflowV1alpha1ArtifactoryArtifact from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifactory_artifact_repository import 
IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository +from argo_workflows.model.io_argoproj_workflow_v1alpha1_azure_artifact import IoArgoprojWorkflowV1alpha1AzureArtifact +from argo_workflows.model.io_argoproj_workflow_v1alpha1_azure_artifact_repository import IoArgoprojWorkflowV1alpha1AzureArtifactRepository from argo_workflows.model.io_argoproj_workflow_v1alpha1_backoff import IoArgoprojWorkflowV1alpha1Backoff +from argo_workflows.model.io_argoproj_workflow_v1alpha1_basic_auth import IoArgoprojWorkflowV1alpha1BasicAuth from argo_workflows.model.io_argoproj_workflow_v1alpha1_cache import IoArgoprojWorkflowV1alpha1Cache +from argo_workflows.model.io_argoproj_workflow_v1alpha1_client_cert_auth import IoArgoprojWorkflowV1alpha1ClientCertAuth from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template_list import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest +from argo_workflows.model.io_argoproj_workflow_v1alpha1_collect_event_request import IoArgoprojWorkflowV1alpha1CollectEventRequest from argo_workflows.model.io_argoproj_workflow_v1alpha1_condition import IoArgoprojWorkflowV1alpha1Condition from argo_workflows.model.io_argoproj_workflow_v1alpha1_container_node import IoArgoprojWorkflowV1alpha1ContainerNode from argo_workflows.model.io_argoproj_workflow_v1alpha1_container_set_retry_strategy import 
IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy @@ -268,6 +219,8 @@ from argo_workflows.model.io_argoproj_workflow_v1alpha1_hdfs_artifact_repository import IoArgoprojWorkflowV1alpha1HDFSArtifactRepository from argo_workflows.model.io_argoproj_workflow_v1alpha1_http import IoArgoprojWorkflowV1alpha1HTTP from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_artifact import IoArgoprojWorkflowV1alpha1HTTPArtifact +from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_auth import IoArgoprojWorkflowV1alpha1HTTPAuth +from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_body_source import IoArgoprojWorkflowV1alpha1HTTPBodySource from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_header import IoArgoprojWorkflowV1alpha1HTTPHeader from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_header_source import IoArgoprojWorkflowV1alpha1HTTPHeaderSource from argo_workflows.model.io_argoproj_workflow_v1alpha1_header import IoArgoprojWorkflowV1alpha1Header @@ -281,6 +234,7 @@ from argo_workflows.model.io_argoproj_workflow_v1alpha1_link import IoArgoprojWorkflowV1alpha1Link from argo_workflows.model.io_argoproj_workflow_v1alpha1_lint_cron_workflow_request import IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest from argo_workflows.model.io_argoproj_workflow_v1alpha1_log_entry import IoArgoprojWorkflowV1alpha1LogEntry +from argo_workflows.model.io_argoproj_workflow_v1alpha1_manifest_from import IoArgoprojWorkflowV1alpha1ManifestFrom from argo_workflows.model.io_argoproj_workflow_v1alpha1_memoization_status import IoArgoprojWorkflowV1alpha1MemoizationStatus from argo_workflows.model.io_argoproj_workflow_v1alpha1_memoize import IoArgoprojWorkflowV1alpha1Memoize from argo_workflows.model.io_argoproj_workflow_v1alpha1_metadata import IoArgoprojWorkflowV1alpha1Metadata @@ -292,6 +246,8 @@ from argo_workflows.model.io_argoproj_workflow_v1alpha1_node_result import IoArgoprojWorkflowV1alpha1NodeResult from 
argo_workflows.model.io_argoproj_workflow_v1alpha1_node_status import IoArgoprojWorkflowV1alpha1NodeStatus from argo_workflows.model.io_argoproj_workflow_v1alpha1_node_synchronization_status import IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus +from argo_workflows.model.io_argoproj_workflow_v1alpha1_o_auth2_auth import IoArgoprojWorkflowV1alpha1OAuth2Auth +from argo_workflows.model.io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param import IoArgoprojWorkflowV1alpha1OAuth2EndpointParam from argo_workflows.model.io_argoproj_workflow_v1alpha1_oss_artifact import IoArgoprojWorkflowV1alpha1OSSArtifact from argo_workflows.model.io_argoproj_workflow_v1alpha1_oss_artifact_repository import IoArgoprojWorkflowV1alpha1OSSArtifactRepository from argo_workflows.model.io_argoproj_workflow_v1alpha1_oss_lifecycle_rule import IoArgoprojWorkflowV1alpha1OSSLifecycleRule @@ -382,9 +338,6 @@ from argo_workflows.model.persistent_volume_claim_template import PersistentVolumeClaimTemplate from argo_workflows.model.persistent_volume_claim_volume_source import PersistentVolumeClaimVolumeSource from argo_workflows.model.photon_persistent_disk_volume_source import PhotonPersistentDiskVolumeSource -from argo_workflows.model.pipeline_log_entry import PipelineLogEntry -from argo_workflows.model.pipeline_pipeline_watch_event import PipelinePipelineWatchEvent -from argo_workflows.model.pipeline_step_watch_event import PipelineStepWatchEvent from argo_workflows.model.pod_affinity import PodAffinity from argo_workflows.model.pod_affinity_term import PodAffinityTerm from argo_workflows.model.pod_anti_affinity import PodAntiAffinity @@ -420,9 +373,6 @@ from argo_workflows.model.stream_result_of_eventsource_log_entry import StreamResultOfEventsourceLogEntry from argo_workflows.model.stream_result_of_io_argoproj_workflow_v1alpha1_log_entry import StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry from argo_workflows.model.stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event import 
StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent -from argo_workflows.model.stream_result_of_pipeline_log_entry import StreamResultOfPipelineLogEntry -from argo_workflows.model.stream_result_of_pipeline_pipeline_watch_event import StreamResultOfPipelinePipelineWatchEvent -from argo_workflows.model.stream_result_of_pipeline_step_watch_event import StreamResultOfPipelineStepWatchEvent from argo_workflows.model.stream_result_of_sensor_log_entry import StreamResultOfSensorLogEntry from argo_workflows.model.stream_result_of_sensor_sensor_watch_event import StreamResultOfSensorSensorWatchEvent from argo_workflows.model.sysctl import Sysctl diff --git a/sdks/python/client/docs/ArchivedWorkflowServiceApi.md b/sdks/python/client/docs/ArchivedWorkflowServiceApi.md index bd2978f53a04..6df6be16e822 100644 --- a/sdks/python/client/docs/ArchivedWorkflowServiceApi.md +++ b/sdks/python/client/docs/ArchivedWorkflowServiceApi.md @@ -20,6 +20,7 @@ Method | HTTP request | Description ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -32,9 +33,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = archived_workflow_service_api.ArchivedWorkflowServiceApi(api_client) uid = "uid_example" # str | @@ -60,7 +71,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -83,6 +94,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -96,9 +108,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = archived_workflow_service_api.ArchivedWorkflowServiceApi(api_client) uid = "uid_example" # str | @@ -124,7 +146,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -147,6 +169,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -160,9 +183,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = archived_workflow_service_api.ArchivedWorkflowServiceApi(api_client) @@ -184,7 +217,7 @@ This endpoint does not need any parameter. 
### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -207,6 +240,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -220,9 +254,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = archived_workflow_service_api.ArchivedWorkflowServiceApi(api_client) list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) @@ -265,7 +309,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -288,6 +332,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -301,9 +346,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = archived_workflow_service_api.ArchivedWorkflowServiceApi(api_client) list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) @@ -348,7 +403,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -371,6 +426,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -385,9 +441,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = archived_workflow_service_api.ArchivedWorkflowServiceApi(api_client) uid = "uid_example" # str | @@ -395,6 +461,9 @@ with argo_workflows.ApiClient() as api_client: memoized=True, name="name_example", namespace="namespace_example", + parameters=[ + "parameters_example", + ], uid="uid_example", ) # IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest | @@ -420,7 +489,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -443,6 +512,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -457,9 +527,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = archived_workflow_service_api.ArchivedWorkflowServiceApi(api_client) uid = "uid_example" # str | @@ -467,6 +547,9 @@ with argo_workflows.ApiClient() as api_client: name="name_example", namespace="namespace_example", node_field_selector="node_field_selector_example", + parameters=[ + "parameters_example", + ], restart_successful=True, uid="uid_example", ) # IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest | @@ -493,7 +576,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/python/client/docs/ArtifactServiceApi.md b/sdks/python/client/docs/ArtifactServiceApi.md index 3d263433ab3c..e77dcbae8a86 100644 --- a/sdks/python/client/docs/ArtifactServiceApi.md +++ b/sdks/python/client/docs/ArtifactServiceApi.md @@ -4,19 +4,105 @@ All URIs are relative to *http://localhost:2746* Method | HTTP request | Description ------------- | ------------- | ------------- -[**get_input_artifact**](ArtifactServiceApi.md#get_input_artifact) | **GET** /input-artifacts/{namespace}/{name}/{podName}/{artifactName} | Get an input artifact. -[**get_input_artifact_by_uid**](ArtifactServiceApi.md#get_input_artifact_by_uid) | **GET** /input-artifacts-by-uid/{uid}/{podName}/{artifactName} | Get an input artifact by UID. -[**get_output_artifact**](ArtifactServiceApi.md#get_output_artifact) | **GET** /artifacts/{namespace}/{name}/{podName}/{artifactName} | Get an output artifact. -[**get_output_artifact_by_uid**](ArtifactServiceApi.md#get_output_artifact_by_uid) | **GET** /artifacts-by-uid/{uid}/{podName}/{artifactName} | Get an output artifact by UID. 
+[**get_artifact_file**](ArtifactServiceApi.md#get_artifact_file) | **GET** /artifact-files/{namespace}/{idDiscriminator}/{id}/{nodeId}/{artifactDiscriminator}/{artifactName} | Get an artifact. +[**get_input_artifact**](ArtifactServiceApi.md#get_input_artifact) | **GET** /input-artifacts/{namespace}/{name}/{nodeId}/{artifactName} | Get an input artifact. +[**get_input_artifact_by_uid**](ArtifactServiceApi.md#get_input_artifact_by_uid) | **GET** /input-artifacts-by-uid/{uid}/{nodeId}/{artifactName} | Get an input artifact by UID. +[**get_output_artifact**](ArtifactServiceApi.md#get_output_artifact) | **GET** /artifacts/{namespace}/{name}/{nodeId}/{artifactName} | Get an output artifact. +[**get_output_artifact_by_uid**](ArtifactServiceApi.md#get_output_artifact_by_uid) | **GET** /artifacts-by-uid/{uid}/{nodeId}/{artifactName} | Get an output artifact by UID. +# **get_artifact_file** +> file_type get_artifact_file(namespace, id_discriminator, id, node_id, artifact_name, ) + +Get an artifact. + +### Example + +* Api Key Authentication (BearerToken): +```python +import time +import argo_workflows +from argo_workflows.api import artifact_service_api +from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from pprint import pprint +# Defining the host is optional and defaults to http://localhost:2746 +# See configuration.py for a list of all supported configuration parameters. +configuration = argo_workflows.Configuration( + host = "http://localhost:2746" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' + +# Enter a context with an instance of the API client +with argo_workflows.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = artifact_service_api.ArtifactServiceApi(api_client) + namespace = "namespace_example" # str | + id_discriminator = "workflow" # str | + id = "id_example" # str | + node_id = "nodeId_example" # str | + artifact_name = "artifactName_example" # str | + + # example passing only required values which don't have defaults set + try: + # Get an artifact. + api_response = api_instance.get_artifact_file(namespace, id_discriminator, id, node_id, artifact_name, ) + pprint(api_response) + except argo_workflows.ApiException as e: + print("Exception when calling ArtifactServiceApi->get_artifact_file: %s\n" % e) +``` + + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **namespace** | **str**| | + **id_discriminator** | **str**| | + **id** | **str**| | + **node_id** | **str**| | + **artifact_name** | **str**| | + **artifact_discriminator** | **str**| | defaults to "outputs" + +### Return type + +**file_type** + +### Authorization + +[BearerToken](../README.md#BearerToken) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | An artifact file. | - | +**0** | An unexpected error response. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + # **get_input_artifact** -> get_input_artifact(namespace, name, pod_name, artifact_name) +> file_type get_input_artifact(namespace, name, node_id, artifact_name) Get an input artifact. 
### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -29,20 +115,31 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = artifact_service_api.ArtifactServiceApi(api_client) namespace = "namespace_example" # str | name = "name_example" # str | - pod_name = "podName_example" # str | + node_id = "nodeId_example" # str | artifact_name = "artifactName_example" # str | # example passing only required values which don't have defaults set try: # Get an input artifact. 
- api_instance.get_input_artifact(namespace, name, pod_name, artifact_name) + api_response = api_instance.get_input_artifact(namespace, name, node_id, artifact_name) + pprint(api_response) except argo_workflows.ApiException as e: print("Exception when calling ArtifactServiceApi->get_input_artifact: %s\n" % e) ``` @@ -54,16 +151,16 @@ Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **namespace** | **str**| | **name** | **str**| | - **pod_name** | **str**| | + **node_id** | **str**| | **artifact_name** | **str**| | ### Return type -void (empty response body) +**file_type** ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -80,12 +177,13 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **get_input_artifact_by_uid** -> get_input_artifact_by_uid(namespace, uid, pod_name, artifact_name) +> file_type get_input_artifact_by_uid(uid, node_id, artifact_name) Get an input artifact by UID. ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -98,20 +196,30 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = artifact_service_api.ArtifactServiceApi(api_client) - namespace = "namespace_example" # str | uid = "uid_example" # str | - pod_name = "podName_example" # str | + node_id = "nodeId_example" # str | artifact_name = "artifactName_example" # str | # example passing only required values which don't have defaults set try: # Get an input artifact by UID. - api_instance.get_input_artifact_by_uid(namespace, uid, pod_name, artifact_name) + api_response = api_instance.get_input_artifact_by_uid(uid, node_id, artifact_name) + pprint(api_response) except argo_workflows.ApiException as e: print("Exception when calling ArtifactServiceApi->get_input_artifact_by_uid: %s\n" % e) ``` @@ -121,18 +229,17 @@ with argo_workflows.ApiClient() as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | **uid** | **str**| | - **pod_name** | **str**| | + **node_id** | **str**| | **artifact_name** | **str**| | ### Return type -void (empty response body) +**file_type** ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -149,12 +256,13 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **get_output_artifact** -> get_output_artifact(namespace, name, pod_name, artifact_name) +> file_type get_output_artifact(namespace, name, node_id, artifact_name) Get an output artifact. 
### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -167,20 +275,31 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = artifact_service_api.ArtifactServiceApi(api_client) namespace = "namespace_example" # str | name = "name_example" # str | - pod_name = "podName_example" # str | + node_id = "nodeId_example" # str | artifact_name = "artifactName_example" # str | # example passing only required values which don't have defaults set try: # Get an output artifact. 
- api_instance.get_output_artifact(namespace, name, pod_name, artifact_name) + api_response = api_instance.get_output_artifact(namespace, name, node_id, artifact_name) + pprint(api_response) except argo_workflows.ApiException as e: print("Exception when calling ArtifactServiceApi->get_output_artifact: %s\n" % e) ``` @@ -192,16 +311,16 @@ Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **namespace** | **str**| | **name** | **str**| | - **pod_name** | **str**| | + **node_id** | **str**| | **artifact_name** | **str**| | ### Return type -void (empty response body) +**file_type** ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -218,12 +337,13 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **get_output_artifact_by_uid** -> get_output_artifact_by_uid(uid, pod_name, artifact_name) +> file_type get_output_artifact_by_uid(uid, node_id, artifact_name) Get an output artifact by UID. ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -236,19 +356,30 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = artifact_service_api.ArtifactServiceApi(api_client) uid = "uid_example" # str | - pod_name = "podName_example" # str | + node_id = "nodeId_example" # str | artifact_name = "artifactName_example" # str | # example passing only required values which don't have defaults set try: # Get an output artifact by UID. - api_instance.get_output_artifact_by_uid(uid, pod_name, artifact_name) + api_response = api_instance.get_output_artifact_by_uid(uid, node_id, artifact_name) + pprint(api_response) except argo_workflows.ApiException as e: print("Exception when calling ArtifactServiceApi->get_output_artifact_by_uid: %s\n" % e) ``` @@ -259,16 +390,16 @@ with argo_workflows.ApiClient() as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **uid** | **str**| | - **pod_name** | **str**| | + **node_id** | **str**| | **artifact_name** | **str**| | ### Return type -void (empty response body) +**file_type** ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/python/client/docs/ClusterWorkflowTemplateServiceApi.md b/sdks/python/client/docs/ClusterWorkflowTemplateServiceApi.md index 63c54db1d2b7..d9e90fc47717 100644 --- a/sdks/python/client/docs/ClusterWorkflowTemplateServiceApi.md +++ b/sdks/python/client/docs/ClusterWorkflowTemplateServiceApi.md @@ -19,6 +19,7 @@ Method | HTTP request | Description ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -33,9 +34,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization 
parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = cluster_workflow_template_service_api.ClusterWorkflowTemplateServiceApi(api_client) body = IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest( @@ -313,6 +324,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -326,6 +349,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -338,6 +373,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -351,6 +387,7 @@ with argo_workflows.ApiClient() as 
api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -390,6 +427,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -490,6 +579,18 @@ with argo_workflows.ApiClient() as api_client: ), ], ), + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( config_map="config_map_example", key="key_example", @@ -527,6 +628,18 @@ with 
argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -540,6 +653,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -552,6 +677,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -565,6 +691,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -604,6 +731,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -1092,6 +1271,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -1102,6 +1292,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -1115,6 +1306,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1153,6 +1345,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, 
+ ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -1797,6 +2041,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -1810,6 +2066,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -1822,6 +2090,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + 
branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -1835,6 +2104,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1874,6 +2144,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -1995,6 +2317,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -2008,6 +2342,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -2020,6 +2366,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -2033,6 +2380,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -2072,6 +2420,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -2215,8 +2615,20 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, @@ -2228,6 +2640,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -2240,6 +2664,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -2253,6 +2678,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -2292,6 +2718,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), 
http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -2384,6 +2862,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -2687,6 +3168,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ 
-2700,6 +3193,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -2712,6 +3217,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -2725,6 +3231,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -2764,6 +3271,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + 
scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -2926,6 +3485,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -2939,6 +3510,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -2951,6 +3534,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -2964,6 +3548,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3003,6 +3588,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -3117,78 +3754,315 @@ with argo_workflows.ApiClient() as api_client: "flags_example", ], manifest="manifest_example", - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + 
archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( + url="url_example", + username_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", + 
git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + 
secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ), + merge_strategy="merge_strategy_example", + set_owner_reference=True, + success_condition="success_condition_example", + ), + retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( + affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( + node_anti_affinity={}, + ), + backoff=IoArgoprojWorkflowV1alpha1Backoff( + duration="duration_example", + factor="factor_example", + max_duration="max_duration_example", + ), + expression="expression_example", + limit="limit_example", + retry_policy="retry_policy_example", + ), + scheduler_name="scheduler_name_example", + script=IoArgoprojWorkflowV1alpha1ScriptTemplate( + args=[ + "args_example", + ], + command=[ + "command_example", + ], + env=[ + EnvVar( + name="name_example", + value="value_example", + value_from=EnvVarSource( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + field_ref=ObjectFieldSelector( + api_version="api_version_example", + field_path="field_path_example", + ), + resource_field_ref=ResourceFieldSelector( + container_name="container_name_example", + divisor="divisor_example", + resource="resource_example", + ), + secret_key_ref=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + ], + env_from=[ + EnvFromSource( + config_map_ref=ConfigMapEnvSource( + name="name_example", + optional=True, + ), + prefix="prefix_example", + secret_ref=SecretEnvSource( + name="name_example", + optional=True, + ), + ), + ], + image="image_example", + image_pull_policy="image_pull_policy_example", + lifecycle=Lifecycle( + post_start=LifecycleHandler( + _exec=ExecAction( + command=[ + "command_example", ], ), http_get=HTTPGetAction( @@ -3726,6 +4600,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + 
artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -3739,6 +4625,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -3751,6 +4649,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -3764,6 +4663,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3803,10 +4703,62 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", ), ], url="url_example", @@ -3920,6 +4872,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -3933,6 +4897,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -3945,6 +4921,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -3958,6 +4935,7 @@ with argo_workflows.ApiClient() as 
api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3997,6 +4975,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -4751,6 +5781,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -4761,6 +5802,7 @@ with argo_workflows.ApiClient() as api_client: 
), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -4774,6 +5816,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -4812,6 +5855,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -5456,6 +6551,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + 
service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -5469,6 +6576,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -5481,6 +6600,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -5494,6 +6614,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -5533,6 +6654,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -5654,6 +6827,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -5667,6 +6852,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -5679,6 +6876,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -5692,6 +6890,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -5731,6 +6930,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + 
auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -5874,6 +7125,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -5887,6 +7150,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + 
endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -5899,6 +7174,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -5912,6 +7188,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -5951,6 +7228,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -6043,6 +7372,9 @@ with argo_workflows.ApiClient() as api_client: ], 
http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -6346,6 +7678,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -6359,6 +7703,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -6371,6 +7727,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -6384,6 +7741,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -6423,11 +7781,63 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), ], url="url_example", ), @@ -6585,6 +7995,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -6598,6 +8020,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", 
from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -6610,6 +8044,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -6623,6 +8058,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -6662,6 +8098,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -6776,97 +8264,334 @@ with argo_workflows.ApiClient() as api_client: "flags_example", ], manifest="manifest_example", - merge_strategy="merge_strategy_example", - 
set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( + url="url_example", + username_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - 
name="name_example", - optional=True, + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", ), - ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + 
lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ), + merge_strategy="merge_strategy_example", + set_owner_reference=True, + success_condition="success_condition_example", + ), + retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( + affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( + node_anti_affinity={}, + ), + backoff=IoArgoprojWorkflowV1alpha1Backoff( + duration="duration_example", + factor="factor_example", + max_duration="max_duration_example", + ), + expression="expression_example", + limit="limit_example", + retry_policy="retry_policy_example", + ), + scheduler_name="scheduler_name_example", + script=IoArgoprojWorkflowV1alpha1ScriptTemplate( + args=[ + "args_example", + ], + command=[ + "command_example", + ], + env=[ + EnvVar( + 
name="name_example", + value="value_example", + value_from=EnvVarSource( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + field_ref=ObjectFieldSelector( + api_version="api_version_example", + field_path="field_path_example", + ), + resource_field_ref=ResourceFieldSelector( + container_name="container_name_example", + divisor="divisor_example", + resource="resource_example", + ), + secret_key_ref=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + ], + env_from=[ + EnvFromSource( + config_map_ref=ConfigMapEnvSource( + name="name_example", + optional=True, + ), + prefix="prefix_example", + secret_ref=SecretEnvSource( + name="name_example", + optional=True, + ), + ), + ], + image="image_example", + image_pull_policy="image_pull_policy_example", + lifecycle=Lifecycle( + post_start=LifecycleHandler( + _exec=ExecAction( + command=[ + "command_example", + ], + ), + http_get=HTTPGetAction( + host="host_example", + http_headers=[ + HTTPHeader( + name="name_example", + value="value_example", + ), + ], + path="path_example", + port="port_example", + scheme="HTTP", + ), + tcp_socket=TCPSocketAction( + host="host_example", + port="port_example", + ), + ), pre_stop=LifecycleHandler( _exec=ExecAction( command=[ @@ -7385,6 +9110,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -7398,6 +9135,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( 
+ key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -7410,6 +9159,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -7423,6 +9173,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -7462,6 +9213,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ 
IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -7579,6 +9382,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -7592,9 +9407,21 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", service_account_key_secret=SecretKeySelector( @@ -7604,6 +9431,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -7617,6 +9445,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -7656,6 +9485,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -8728,7 +10609,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -8751,6 +10632,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -8763,9 +10645,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = cluster_workflow_template_service_api.ClusterWorkflowTemplateServiceApi(api_client) name = "name_example" # str | @@ -8813,7 +10705,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -8836,6 +10728,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -8849,9 +10742,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = cluster_workflow_template_service_api.ClusterWorkflowTemplateServiceApi(api_client) name = "name_example" # str | @@ -8887,7 +10790,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -8910,6 +10813,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -8924,9 +10828,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = cluster_workflow_template_service_api.ClusterWorkflowTemplateServiceApi(api_client) body = IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest( @@ -9204,6 +11118,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -9217,6 +11143,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -9229,6 +11167,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -9242,6 +11181,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -9281,6 +11221,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + 
auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -9381,6 +11373,18 @@ with argo_workflows.ApiClient() as api_client: ), ], ), + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( config_map="config_map_example", key="key_example", @@ -9418,6 +11422,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + 
service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -9431,6 +11447,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -9443,6 +11471,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -9456,6 +11485,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -9495,6 +11525,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -9983,6 +12065,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -9993,6 +12086,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -10006,6 +12100,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -10044,6 +12139,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", 
+ optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -10688,6 +12835,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -10701,6 +12860,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -10713,6 +12884,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -10726,6 +12898,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( 
key="key_example", name="name_example", @@ -10765,6 +12938,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -10886,6 +13111,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -10899,6 +13136,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + 
account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -10911,6 +13160,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -10924,6 +13174,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -10963,15 +13214,67 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ], - url="url_example", - ), - mode=1, + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + 
scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, name="name_example", optional=True, oss=IoArgoprojWorkflowV1alpha1OSSArtifact( @@ -11106,6 +13409,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -11119,6 +13434,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -11131,6 +13458,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -11144,6 +13472,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -11183,6 +13512,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + 
basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -11275,6 +13656,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -11578,6 +13962,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -11591,6 +13987,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + 
azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -11603,6 +14011,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -11616,6 +14025,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -11655,6 +14065,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -11817,6 +14279,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -11830,6 +14304,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -11842,6 +14328,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -11855,6 +14342,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -11894,6 +14382,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -12008,57 +14548,294 @@ with argo_workflows.ApiClient() as api_client: "flags_example", ], manifest="manifest_example", - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + 
tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, ), - secret_key_ref=SecretKeySelector( + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, ), - ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + 
value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ), + merge_strategy="merge_strategy_example", + set_owner_reference=True, + success_condition="success_condition_example", + ), + retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( + affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( + node_anti_affinity={}, + ), + backoff=IoArgoprojWorkflowV1alpha1Backoff( + 
duration="duration_example", + factor="factor_example", + max_duration="max_duration_example", + ), + expression="expression_example", + limit="limit_example", + retry_policy="retry_policy_example", + ), + scheduler_name="scheduler_name_example", + script=IoArgoprojWorkflowV1alpha1ScriptTemplate( + args=[ + "args_example", + ], + command=[ + "command_example", + ], + env=[ + EnvVar( + name="name_example", + value="value_example", + value_from=EnvVarSource( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + field_ref=ObjectFieldSelector( + api_version="api_version_example", + field_path="field_path_example", + ), + resource_field_ref=ResourceFieldSelector( + container_name="container_name_example", + divisor="divisor_example", + resource="resource_example", + ), + secret_key_ref=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), ], env_from=[ EnvFromSource( @@ -12617,6 +15394,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -12630,6 +15419,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -12642,6 +15443,7 @@ with 
argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -12655,6 +15457,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -12694,21 +15497,73 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + 
name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", optional=True, ), bucket="bucket_example", @@ -12811,6 +15666,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -12824,6 +15691,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -12836,6 +15715,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -12849,6 +15729,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -12888,6 +15769,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -13642,6 +16575,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -13652,6 +16596,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -13665,6 +16610,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -13703,6 +16649,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), 
http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -14347,6 +17345,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -14360,6 +17370,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + 
container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -14372,6 +17394,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -14385,6 +17408,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -14424,6 +17448,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -14545,6 +17621,18 @@ with 
argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -14558,6 +17646,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -14570,6 +17670,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -14583,6 +17684,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -14622,6 +17724,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -14765,6 +17919,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -14778,6 +17944,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -14790,6 +17968,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -14803,6 +17982,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", 
revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -14842,6 +18022,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -14934,6 +18166,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -15237,6 +18472,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": 
"key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -15250,8 +18497,20 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), - _from="_from_example", - from_expression="from_expression_example", + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -15262,6 +18521,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -15275,6 +18535,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -15314,6 +18575,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -15476,6 +18789,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -15489,6 +18814,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -15501,6 +18838,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -15514,6 +18852,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", 
@@ -15553,6 +18892,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -15667,93 +19058,330 @@ with argo_workflows.ApiClient() as api_client: "flags_example", ], manifest="manifest_example", - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - 
script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( + url="url_example", + username_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + 
gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", path="path_example", - port="port_example", - scheme="HTTP", ), - tcp_socket=TCPSocketAction( + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + 
password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ), + merge_strategy="merge_strategy_example", + set_owner_reference=True, + success_condition="success_condition_example", + ), + retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( + affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( + node_anti_affinity={}, + ), + backoff=IoArgoprojWorkflowV1alpha1Backoff( + duration="duration_example", + factor="factor_example", + max_duration="max_duration_example", + ), + expression="expression_example", + limit="limit_example", + retry_policy="retry_policy_example", + ), + scheduler_name="scheduler_name_example", + script=IoArgoprojWorkflowV1alpha1ScriptTemplate( + args=[ + "args_example", + ], + command=[ + "command_example", + ], + env=[ + EnvVar( + name="name_example", + value="value_example", + value_from=EnvVarSource( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + field_ref=ObjectFieldSelector( + api_version="api_version_example", + field_path="field_path_example", + ), + resource_field_ref=ResourceFieldSelector( + container_name="container_name_example", + divisor="divisor_example", + resource="resource_example", + ), + secret_key_ref=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + ), + ), + ], + env_from=[ + EnvFromSource( + config_map_ref=ConfigMapEnvSource( + name="name_example", + optional=True, + ), + prefix="prefix_example", + secret_ref=SecretEnvSource( + name="name_example", + optional=True, + ), + ), + ], + image="image_example", + image_pull_policy="image_pull_policy_example", + lifecycle=Lifecycle( + post_start=LifecycleHandler( + _exec=ExecAction( + command=[ + "command_example", + ], + ), + http_get=HTTPGetAction( + host="host_example", + http_headers=[ + HTTPHeader( + name="name_example", + value="value_example", + ), + ], + path="path_example", + port="port_example", + scheme="HTTP", + ), + tcp_socket=TCPSocketAction( host="host_example", port="port_example", ), @@ -16276,6 +19904,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -16289,6 +19929,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -16301,6 +19953,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -16314,6 +19967,7 @@ with argo_workflows.ApiClient() as api_client: ), 
repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -16353,6 +20007,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -16470,6 +20176,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -16483,6 +20201,18 @@ with 
argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -16495,6 +20225,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -16508,6 +20239,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -16547,6 +20279,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + 
"scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -17619,7 +21403,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -17642,6 +21426,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -17655,9 +21440,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = cluster_workflow_template_service_api.ClusterWorkflowTemplateServiceApi(api_client) list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. 
(optional) @@ -17700,7 +21495,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -17723,6 +21518,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -17737,9 +21533,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = cluster_workflow_template_service_api.ClusterWorkflowTemplateServiceApi(api_client) name = "name_example" # str | DEPRECATED: This field is ignored. 
@@ -18012,6 +21818,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -18025,6 +21843,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -18037,6 +21867,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -18050,6 +21881,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -18089,6 +21921,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -18189,6 +22073,18 @@ with argo_workflows.ApiClient() as api_client: ), ], ), + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( config_map="config_map_example", key="key_example", @@ -18226,6 +22122,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -18239,6 +22147,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + 
container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -18251,6 +22171,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -18264,6 +22185,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -18303,6 +22225,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -18791,6 +22765,17 @@ with 
argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -18801,6 +22786,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -18814,6 +22800,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -18852,6 +22839,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + 
token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -19496,6 +23535,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -19509,6 +23560,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -19521,6 +23584,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -19534,6 +23598,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -19573,6 +23638,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -19694,6 +23811,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -19707,6 +23836,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -19719,6 +23860,7 @@ with argo_workflows.ApiClient() as api_client: ), ), 
git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -19732,6 +23874,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -19771,27 +23914,79 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( mark_deletion_after_days=1, mark_infrequent_access_after_days=1, @@ -19914,6 +24109,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -19927,6 +24134,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -19939,6 +24158,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -19952,6 +24172,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( 
key="key_example", name="name_example", @@ -19991,6 +24212,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -20083,6 +24356,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -20386,6 +24662,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + 
strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -20399,6 +24687,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -20411,6 +24711,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -20424,6 +24725,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -20463,6 +24765,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -20625,6 +24979,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -20638,6 +25004,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -20650,6 +25028,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -20663,6 +25042,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -20702,6 +25082,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + 
basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -20816,47 +25248,284 @@ with argo_workflows.ApiClient() as api_client: "flags_example", ], manifest="manifest_example", - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", 
- value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + 
revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + 
token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ), + merge_strategy="merge_strategy_example", + set_owner_reference=True, + success_condition="success_condition_example", + ), + 
retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( + affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( + node_anti_affinity={}, + ), + backoff=IoArgoprojWorkflowV1alpha1Backoff( + duration="duration_example", + factor="factor_example", + max_duration="max_duration_example", + ), + expression="expression_example", + limit="limit_example", + retry_policy="retry_policy_example", + ), + scheduler_name="scheduler_name_example", + script=IoArgoprojWorkflowV1alpha1ScriptTemplate( + args=[ + "args_example", + ], + command=[ + "command_example", + ], + env=[ + EnvVar( + name="name_example", + value="value_example", + value_from=EnvVarSource( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + field_ref=ObjectFieldSelector( + api_version="api_version_example", + field_path="field_path_example", + ), + resource_field_ref=ResourceFieldSelector( + container_name="container_name_example", divisor="divisor_example", resource="resource_example", ), @@ -21425,6 +26094,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -21438,6 +26119,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ 
-21450,6 +26143,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -21463,6 +26157,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -21502,17 +26197,69 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + 
name="name_example", + optional=True, oss=IoArgoprojWorkflowV1alpha1OSSArtifact( access_key_secret=SecretKeySelector( key="key_example", @@ -21619,6 +26366,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -21632,6 +26391,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -21644,6 +26415,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -21657,6 +26429,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -21696,6 +26469,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -22450,6 +27275,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -22460,6 +27296,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -22473,6 +27310,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -22511,6 +27349,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + 
password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -23155,6 +28045,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -23168,6 +28070,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", 
from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -23180,6 +28094,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -23193,6 +28108,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -23232,6 +28148,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -23353,6 +28321,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + 
pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -23366,6 +28346,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -23378,6 +28370,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -23391,6 +28384,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -23430,6 +28424,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -23573,6 +28619,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -23586,6 +28644,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -23598,6 +28668,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -23611,6 +28682,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", 
@@ -23650,6 +28722,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -23742,6 +28866,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -24045,6 +29172,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -24058,18 +29197,348 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + 
secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ], + parameters=[ + IoArgoprojWorkflowV1alpha1Parameter( + default="default_example", + description="description_example", + enum=[ + "enum_example", + ], + global_name="global_name_example", + name="name_example", + value="value_example", + value_from=IoArgoprojWorkflowV1alpha1ValueFrom( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + default="default_example", + event="event_example", + expression="expression_example", + jq_filter="jq_filter_example", + json_path="json_path_example", + parameter="parameter_example", + path="path_example", + supplied={}, + ), + ), + ], + ), + memoize=IoArgoprojWorkflowV1alpha1Memoize( + cache=IoArgoprojWorkflowV1alpha1Cache( + config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
key="key_example", + max_age="max_age_example", + ), + metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + metrics=IoArgoprojWorkflowV1alpha1Metrics( + prometheus=[ + IoArgoprojWorkflowV1alpha1Prometheus( + counter=IoArgoprojWorkflowV1alpha1Counter( + value="value_example", + ), + gauge=IoArgoprojWorkflowV1alpha1Gauge( + realtime=True, + value="value_example", + ), + help="help_example", + histogram=IoArgoprojWorkflowV1alpha1Histogram( + buckets=[ + 3.14, + ], + value="value_example", + ), + labels=[ + IoArgoprojWorkflowV1alpha1MetricLabel( + key="key_example", + value="value_example", + ), + ], + name="name_example", + when="when_example", + ), + ], + ), + name="name_example", + node_selector={ + "key": "key_example", + }, + outputs=IoArgoprojWorkflowV1alpha1Outputs( + artifacts=[ + IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + 
from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -24083,6 +29552,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -24116,12 +29586,64 @@ with argo_workflows.ApiClient() as api_client: name="name_example", optional=True, ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + 
IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -24194,6 +29716,7 @@ with argo_workflows.ApiClient() as api_client: sub_path="sub_path_example", ), ], + exit_code="exit_code_example", parameters=[ IoArgoprojWorkflowV1alpha1Parameter( default="default_example", @@ -24221,61 +29744,22 @@ with argo_workflows.ApiClient() as api_client: ), ), ], + result="result_example", ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), + parallelism=1, + plugin={}, + pod_spec_patch="pod_spec_patch_example", + priority=1, + priority_class_name="priority_class_name_example", + resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( + action="action_example", + failure_condition="failure_condition_example", + flags=[ + "flags_example", ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - 
outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( + manifest="manifest_example", + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( _none={}, tar=IoArgoprojWorkflowV1alpha1TarStrategy( @@ -24284,6 +29768,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -24297,6 +29793,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -24309,6 +29817,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -24322,6 +29831,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -24361,6 +29871,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + 
password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -24432,49 +29994,7 @@ with argo_workflows.ApiClient() as api_client: ), sub_path="sub_path_example", ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - 
priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", + ), merge_strategy="merge_strategy_example", set_owner_reference=True, success_condition="success_condition_example", @@ -25084,6 +30604,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -25097,6 +30629,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -25109,6 +30653,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -25122,6 +30667,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -25161,6 +30707,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + 
basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -25278,6 +30876,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -25291,6 +30901,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + 
deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -25303,6 +30925,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -25316,6 +30939,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -25355,6 +30979,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -26428,7 +32104,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) 
### HTTP request headers diff --git a/sdks/python/client/docs/Condition.md b/sdks/python/client/docs/Condition.md deleted file mode 100644 index f0faf627559d..000000000000 --- a/sdks/python/client/docs/Condition.md +++ /dev/null @@ -1,18 +0,0 @@ -# Condition - -Condition contains details for one aspect of the current state of this API Resource. - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**last_transition_time** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | -**message** | **str** | message is a human readable message indicating details about the transition. This may be an empty string. | -**reason** | **str** | reason contains a programmatic identifier indicating the reason for the condition's last transition. Producers of specific condition types may define expected values and meanings for this field, and whether the values are considered a guaranteed API. The value should be a CamelCase string. This field may not be empty. | -**status** | **str** | status of the condition, one of True, False, Unknown. | -**type** | **str** | type of condition in CamelCase or in foo.example.com/CamelCase. | -**observed_generation** | **int** | observedGeneration represents the .metadata.generation that the condition was set based upon. For instance, if .metadata.generation is currently 12, but the .status.conditions[x].observedGeneration is 9, the condition is out of date with respect to the current state of the instance. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/CronWorkflowServiceApi.md b/sdks/python/client/docs/CronWorkflowServiceApi.md index 69f75613efdd..779747041204 100644 --- a/sdks/python/client/docs/CronWorkflowServiceApi.md +++ b/sdks/python/client/docs/CronWorkflowServiceApi.md @@ -21,6 +21,7 @@ Method | HTTP request | Description ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -35,9 +36,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = cron_workflow_service_api.CronWorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -367,6 +378,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -380,6 +403,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -392,6 +427,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -405,6 +441,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -444,6 +481,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + 
basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -544,6 +633,18 @@ with argo_workflows.ApiClient() as api_client: ), ], ), + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( config_map="config_map_example", key="key_example", @@ -581,6 +682,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -594,6 +707,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -606,6 +731,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -619,6 +745,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -658,6 +785,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + 
IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -1146,6 +1325,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -1156,6 +1346,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -1169,6 +1360,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1207,6 +1399,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", 
+ name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -1851,6 +2095,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -1864,6 +2120,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -1876,6 +2144,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -1889,6 +2158,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1928,6 +2198,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), 
http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -2049,6 +2371,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -2062,6 +2396,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + 
container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -2074,6 +2420,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -2087,6 +2434,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -2126,6 +2474,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -2269,8 +2669,20 @@ with argo_workflows.ApiClient() as 
api_client: zip={}, ), archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, @@ -2282,6 +2694,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -2294,6 +2718,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -2307,6 +2732,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -2346,6 +2772,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + 
client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -2438,6 +2916,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -2741,6 +3222,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -2754,6 +3247,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", 
from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -2766,6 +3271,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -2779,6 +3285,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -2818,6 +3325,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -2980,6 +3539,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + 
pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -2993,6 +3564,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -3005,6 +3588,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -3018,6 +3602,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3057,6 +3642,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -3171,78 +3808,315 @@ with argo_workflows.ApiClient() as api_client: "flags_example", ], manifest="manifest_example", - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, 
+ ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( + url="url_example", + username_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + 
ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ), + merge_strategy="merge_strategy_example", + set_owner_reference=True, + success_condition="success_condition_example", + ), + retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( + 
affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( + node_anti_affinity={}, + ), + backoff=IoArgoprojWorkflowV1alpha1Backoff( + duration="duration_example", + factor="factor_example", + max_duration="max_duration_example", + ), + expression="expression_example", + limit="limit_example", + retry_policy="retry_policy_example", + ), + scheduler_name="scheduler_name_example", + script=IoArgoprojWorkflowV1alpha1ScriptTemplate( + args=[ + "args_example", + ], + command=[ + "command_example", + ], + env=[ + EnvVar( + name="name_example", + value="value_example", + value_from=EnvVarSource( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + field_ref=ObjectFieldSelector( + api_version="api_version_example", + field_path="field_path_example", + ), + resource_field_ref=ResourceFieldSelector( + container_name="container_name_example", + divisor="divisor_example", + resource="resource_example", + ), + secret_key_ref=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + ], + env_from=[ + EnvFromSource( + config_map_ref=ConfigMapEnvSource( + name="name_example", + optional=True, + ), + prefix="prefix_example", + secret_ref=SecretEnvSource( + name="name_example", + optional=True, + ), + ), + ], + image="image_example", + image_pull_policy="image_pull_policy_example", + lifecycle=Lifecycle( + post_start=LifecycleHandler( + _exec=ExecAction( + command=[ + "command_example", ], ), http_get=HTTPGetAction( @@ -3780,6 +4654,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( 
key="key_example", @@ -3793,6 +4679,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -3805,6 +4703,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -3818,6 +4717,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3857,10 +4757,62 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", ), ], url="url_example", @@ -3974,6 +4926,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -3987,6 +4951,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -3999,6 +4975,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -4012,6 +4989,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -4051,6 +5029,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + 
basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -4805,6 +5835,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -4815,6 +5856,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -4828,6 +5870,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ 
-4866,6 +5909,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -5510,6 +6605,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -5523,6 +6630,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -5535,6 +6654,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -5548,6 +6668,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -5587,6 +6708,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ 
IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -5708,6 +6881,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -5721,6 +6906,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -5733,6 +6930,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -5746,6 +6944,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -5785,6 +6984,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -5928,6 +7179,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -5941,6 +7204,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -5953,6 +7228,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -5966,6 +7242,7 @@ with argo_workflows.ApiClient() as 
api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -6005,6 +7282,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -6097,6 +7426,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -6400,6 +7732,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", 
+ }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -6413,6 +7757,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -6425,6 +7781,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -6438,6 +7795,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -6477,11 +7835,63 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), ], url="url_example", ), @@ -6639,6 +8049,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -6652,6 +8074,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -6664,6 +8098,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -6677,6 +8112,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, 
ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -6716,6 +8152,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -6830,97 +8318,334 @@ with argo_workflows.ApiClient() as api_client: "flags_example", ], manifest="manifest_example", - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - 
retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( + url="url_example", + username_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, + deleted=True, + _from="_from_example", + 
from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", ), - ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + 
auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + 
s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ), + merge_strategy="merge_strategy_example", + set_owner_reference=True, + success_condition="success_condition_example", + ), + retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( + affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( + node_anti_affinity={}, + ), + backoff=IoArgoprojWorkflowV1alpha1Backoff( + duration="duration_example", + factor="factor_example", + max_duration="max_duration_example", + ), + expression="expression_example", + limit="limit_example", + retry_policy="retry_policy_example", + ), + scheduler_name="scheduler_name_example", + script=IoArgoprojWorkflowV1alpha1ScriptTemplate( + args=[ + "args_example", + ], + command=[ + "command_example", + ], + env=[ + EnvVar( + name="name_example", + value="value_example", + value_from=EnvVarSource( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + field_ref=ObjectFieldSelector( + api_version="api_version_example", + field_path="field_path_example", + ), + resource_field_ref=ResourceFieldSelector( + container_name="container_name_example", + 
divisor="divisor_example", + resource="resource_example", + ), + secret_key_ref=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + ], + env_from=[ + EnvFromSource( + config_map_ref=ConfigMapEnvSource( + name="name_example", + optional=True, + ), + prefix="prefix_example", + secret_ref=SecretEnvSource( + name="name_example", + optional=True, + ), + ), + ], + image="image_example", + image_pull_policy="image_pull_policy_example", + lifecycle=Lifecycle( + post_start=LifecycleHandler( + _exec=ExecAction( + command=[ + "command_example", + ], + ), + http_get=HTTPGetAction( + host="host_example", + http_headers=[ + HTTPHeader( + name="name_example", + value="value_example", + ), + ], + path="path_example", + port="port_example", + scheme="HTTP", + ), + tcp_socket=TCPSocketAction( + host="host_example", + port="port_example", + ), + ), pre_stop=LifecycleHandler( _exec=ExecAction( command=[ @@ -7439,6 +9164,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -7452,6 +9189,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -7464,6 +9213,7 @@ with argo_workflows.ApiClient() as api_client: ), ), 
git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -7477,6 +9227,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -7516,6 +9267,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -7633,6 +9436,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", 
+ strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -7646,9 +9461,21 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", service_account_key_secret=SecretKeySelector( @@ -7658,6 +9485,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -7671,6 +9499,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -7710,6 +9539,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -8806,7 +10687,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -8829,6 +10710,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -8841,9 +10723,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = cron_workflow_service_api.CronWorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -8893,7 +10785,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -8916,6 +10808,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -8929,9 +10822,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = cron_workflow_service_api.CronWorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -8969,7 +10872,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -8992,6 +10895,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -9006,9 +10910,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = cron_workflow_service_api.CronWorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -9331,6 +11245,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -9344,6 +11270,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -9356,6 +11294,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -9369,6 +11308,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -9408,6 +11348,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + 
basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -9508,6 +11500,18 @@ with argo_workflows.ApiClient() as api_client: ), ], ), + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( config_map="config_map_example", key="key_example", @@ -9545,6 +11549,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", 
+ ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -9558,6 +11574,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -9570,6 +11598,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -9583,6 +11612,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -9622,6 +11652,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -10110,6 +12192,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -10120,6 +12213,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -10133,6 +12227,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -10171,6 +12266,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + 
client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -10815,6 +12962,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -10828,6 +12987,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -10840,6 +13011,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -10853,6 +13025,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -10892,6 +13065,58 @@ with 
argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -11013,6 +13238,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -11026,6 +13263,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -11038,6 +13287,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -11051,6 +13301,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -11090,15 +13341,67 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ], - url="url_example", - ), - mode=1, + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + 
token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, name="name_example", optional=True, oss=IoArgoprojWorkflowV1alpha1OSSArtifact( @@ -11233,6 +13536,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -11246,6 +13561,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -11258,6 +13585,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -11271,6 +13599,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -11310,6 +13639,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + 
password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -11402,6 +13783,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -11705,6 +14089,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -11718,6 +14114,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + 
account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -11730,6 +14138,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -11743,6 +14152,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -11782,6 +14192,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + 
), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -11944,6 +14406,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -11957,6 +14431,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -11969,6 +14455,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -11982,6 +14469,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -12021,6 +14509,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + 
client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -12135,57 +14675,294 @@ with argo_workflows.ApiClient() as api_client: "flags_example", ], manifest="manifest_example", - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + 
zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, ), - secret_key_ref=SecretKeySelector( + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, ), - ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), 
+ mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ), + merge_strategy="merge_strategy_example", + set_owner_reference=True, + success_condition="success_condition_example", + ), + retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( + affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( + node_anti_affinity={}, + ), + backoff=IoArgoprojWorkflowV1alpha1Backoff( + duration="duration_example", + factor="factor_example", + 
max_duration="max_duration_example", + ), + expression="expression_example", + limit="limit_example", + retry_policy="retry_policy_example", + ), + scheduler_name="scheduler_name_example", + script=IoArgoprojWorkflowV1alpha1ScriptTemplate( + args=[ + "args_example", + ], + command=[ + "command_example", + ], + env=[ + EnvVar( + name="name_example", + value="value_example", + value_from=EnvVarSource( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + field_ref=ObjectFieldSelector( + api_version="api_version_example", + field_path="field_path_example", + ), + resource_field_ref=ResourceFieldSelector( + container_name="container_name_example", + divisor="divisor_example", + resource="resource_example", + ), + secret_key_ref=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), ], env_from=[ EnvFromSource( @@ -12744,6 +15521,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -12757,6 +15546,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -12769,6 +15570,7 @@ with argo_workflows.ApiClient() as api_client: ), ), 
git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -12782,6 +15584,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -12821,21 +15624,73 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + 
], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", optional=True, ), bucket="bucket_example", @@ -12938,6 +15793,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -12951,6 +15818,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -12963,6 +15842,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -12976,6 +15856,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -13015,6 +15896,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -13769,6 +16702,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -13779,6 +16723,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -13792,6 +16737,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -13830,6 +16776,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + 
auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -14474,6 +17472,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -14487,6 +17497,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + 
endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -14499,6 +17521,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -14512,6 +17535,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -14551,6 +17575,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -14672,6 +17748,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), 
archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -14685,6 +17773,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -14697,6 +17797,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -14710,6 +17811,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -14749,6 +17851,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -14892,6 +18046,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -14905,6 +18071,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -14917,6 +18095,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -14930,6 +18109,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, 
ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -14969,6 +18149,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -15061,6 +18293,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -15364,6 +18599,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + 
service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -15377,8 +18624,20 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), - _from="_from_example", - from_expression="from_expression_example", + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -15389,6 +18648,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -15402,6 +18662,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -15441,6 +18702,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + 
client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -15603,6 +18916,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -15616,6 +18941,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -15628,6 +18965,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -15641,6 +18979,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -15680,6 +19019,58 @@ with 
argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -15794,93 +19185,330 @@ with argo_workflows.ApiClient() as api_client: "flags_example", ], manifest="manifest_example", - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - 
script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( + url="url_example", + username_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + 
gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", path="path_example", - port="port_example", - scheme="HTTP", ), - tcp_socket=TCPSocketAction( + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + 
password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ), + merge_strategy="merge_strategy_example", + set_owner_reference=True, + success_condition="success_condition_example", + ), + retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( + affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( + node_anti_affinity={}, + ), + backoff=IoArgoprojWorkflowV1alpha1Backoff( + duration="duration_example", + factor="factor_example", + max_duration="max_duration_example", + ), + expression="expression_example", + limit="limit_example", + retry_policy="retry_policy_example", + ), + scheduler_name="scheduler_name_example", + script=IoArgoprojWorkflowV1alpha1ScriptTemplate( + args=[ + "args_example", + ], + command=[ + "command_example", + ], + env=[ + EnvVar( + name="name_example", + value="value_example", + value_from=EnvVarSource( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + field_ref=ObjectFieldSelector( + api_version="api_version_example", + field_path="field_path_example", + ), + resource_field_ref=ResourceFieldSelector( + container_name="container_name_example", + divisor="divisor_example", + resource="resource_example", + ), + secret_key_ref=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + ), + ), + ], + env_from=[ + EnvFromSource( + config_map_ref=ConfigMapEnvSource( + name="name_example", + optional=True, + ), + prefix="prefix_example", + secret_ref=SecretEnvSource( + name="name_example", + optional=True, + ), + ), + ], + image="image_example", + image_pull_policy="image_pull_policy_example", + lifecycle=Lifecycle( + post_start=LifecycleHandler( + _exec=ExecAction( + command=[ + "command_example", + ], + ), + http_get=HTTPGetAction( + host="host_example", + http_headers=[ + HTTPHeader( + name="name_example", + value="value_example", + ), + ], + path="path_example", + port="port_example", + scheme="HTTP", + ), + tcp_socket=TCPSocketAction( host="host_example", port="port_example", ), @@ -16403,6 +20031,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -16416,6 +20056,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -16428,6 +20080,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -16441,6 +20094,7 @@ with argo_workflows.ApiClient() as api_client: ), 
repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -16480,6 +20134,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -16597,6 +20303,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -16610,6 +20328,18 @@ with 
argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -16622,6 +20352,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -16635,6 +20366,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -16674,6 +20406,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + 
"scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -17770,7 +21554,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -17793,6 +21577,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -17806,9 +21591,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = cron_workflow_service_api.CronWorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -17860,7 +21655,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -17883,6 +21678,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -17897,9 +21693,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. 
+# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = cron_workflow_service_api.CronWorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -17932,7 +21738,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -17955,6 +21761,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -17969,9 +21776,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = cron_workflow_service_api.CronWorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -18004,7 +21821,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -18027,6 +21844,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -18041,9 +21859,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = cron_workflow_service_api.CronWorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -18367,6 +22195,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -18380,6 +22220,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -18392,6 +22244,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -18405,6 +22258,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -18444,6 +22298,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + 
basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -18544,6 +22450,18 @@ with argo_workflows.ApiClient() as api_client: ), ], ), + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( config_map="config_map_example", key="key_example", @@ -18581,6 +22499,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + 
strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -18594,6 +22524,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -18606,6 +22548,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -18619,6 +22562,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -18658,6 +22602,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -19146,6 +23142,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -19156,6 +23163,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -19169,6 +23177,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -19207,6 +23216,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -19851,6 +23912,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -19864,6 +23937,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -19876,6 +23961,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -19889,6 +23975,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", 
@@ -19928,6 +24015,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -20049,6 +24188,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -20062,6 +24213,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -20074,6 +24237,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -20087,6 +24251,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -20126,23 +24291,75 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", 
+ optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), bucket="bucket_example", create_bucket_if_not_present=True, endpoint="endpoint_example", @@ -20269,6 +24486,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -20282,6 +24511,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -20294,6 +24535,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -20307,6 +24549,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", 
revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -20346,6 +24589,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -20438,6 +24733,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -20741,6 +25039,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": 
"key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -20754,6 +25064,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -20766,6 +25088,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -20779,6 +25102,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -20818,6 +25142,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + 
), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -20980,6 +25356,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -20993,6 +25381,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -21005,6 +25405,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -21018,6 +25419,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -21057,6 +25459,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + 
auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -21171,51 +25625,288 @@ with argo_workflows.ApiClient() as api_client: "flags_example", ], manifest="manifest_example", - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - 
name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, ), - secret_key_ref=SecretKeySelector( + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + 
fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + 
), + sub_path="sub_path_example", + ), + ), + merge_strategy="merge_strategy_example", + set_owner_reference=True, + success_condition="success_condition_example", + ), + retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( + affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( + node_anti_affinity={}, + ), + backoff=IoArgoprojWorkflowV1alpha1Backoff( + duration="duration_example", + factor="factor_example", + max_duration="max_duration_example", + ), + expression="expression_example", + limit="limit_example", + retry_policy="retry_policy_example", + ), + scheduler_name="scheduler_name_example", + script=IoArgoprojWorkflowV1alpha1ScriptTemplate( + args=[ + "args_example", + ], + command=[ + "command_example", + ], + env=[ + EnvVar( + name="name_example", + value="value_example", + value_from=EnvVarSource( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + field_ref=ObjectFieldSelector( + api_version="api_version_example", + field_path="field_path_example", + ), + resource_field_ref=ResourceFieldSelector( + container_name="container_name_example", + divisor="divisor_example", + resource="resource_example", + ), + secret_key_ref=SecretKeySelector( key="key_example", name="name_example", optional=True, @@ -21780,6 +26471,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -21793,6 +26496,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -21805,6 +26520,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -21818,6 +26534,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -21857,18 +26574,70 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + 
value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( access_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -21974,6 +26743,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -21987,6 +26768,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -21999,6 +26792,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -22012,6 +26806,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -22051,6 +26846,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), 
http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -22805,6 +27652,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -22815,6 +27673,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -22828,6 +27687,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + 
single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -22866,6 +27726,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -23510,6 +28422,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -23523,6 +28447,18 @@ with argo_workflows.ApiClient() as api_client: 
optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -23535,6 +28471,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -23548,6 +28485,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -23587,6 +28525,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + 
token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -23708,6 +28698,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -23721,6 +28723,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -23733,6 +28747,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -23746,6 +28761,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -23785,6 +28801,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -23928,6 +28996,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -23941,6 +29021,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -23953,6 +29045,7 @@ with argo_workflows.ApiClient() as api_client: ), ), 
git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -23966,6 +29059,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -24005,6 +29099,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -24097,6 +29243,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -24400,8 +29549,337 @@ with argo_workflows.ApiClient() as 
api_client: zip={}, ), archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + 
hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ], + parameters=[ + IoArgoprojWorkflowV1alpha1Parameter( + default="default_example", + description="description_example", + enum=[ + "enum_example", + ], + global_name="global_name_example", + name="name_example", + value="value_example", + value_from=IoArgoprojWorkflowV1alpha1ValueFrom( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + default="default_example", + event="event_example", + expression="expression_example", + jq_filter="jq_filter_example", + json_path="json_path_example", + 
parameter="parameter_example", + path="path_example", + supplied={}, + ), + ), + ], + ), + memoize=IoArgoprojWorkflowV1alpha1Memoize( + cache=IoArgoprojWorkflowV1alpha1Cache( + config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + key="key_example", + max_age="max_age_example", + ), + metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + metrics=IoArgoprojWorkflowV1alpha1Metrics( + prometheus=[ + IoArgoprojWorkflowV1alpha1Prometheus( + counter=IoArgoprojWorkflowV1alpha1Counter( + value="value_example", + ), + gauge=IoArgoprojWorkflowV1alpha1Gauge( + realtime=True, + value="value_example", + ), + help="help_example", + histogram=IoArgoprojWorkflowV1alpha1Histogram( + buckets=[ + 3.14, + ], + value="value_example", + ), + labels=[ + IoArgoprojWorkflowV1alpha1MetricLabel( + key="key_example", + value="value_example", + ), + ], + name="name_example", + when="when_example", + ), + ], + ), + name="name_example", + node_selector={ + "key": "key_example", + }, + outputs=IoArgoprojWorkflowV1alpha1Outputs( + artifacts=[ + IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, @@ -24413,6 +29891,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -24425,6 +29915,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -24438,6 +29929,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -24471,12 +29963,64 @@ with argo_workflows.ApiClient() as api_client: name="name_example", optional=True, ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -24549,6 +30093,7 @@ with argo_workflows.ApiClient() as api_client: sub_path="sub_path_example", ), ], + exit_code="exit_code_example", parameters=[ IoArgoprojWorkflowV1alpha1Parameter( default="default_example", @@ -24576,61 +30121,22 @@ with argo_workflows.ApiClient() as api_client: ), ), ], + result="result_example", ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), + parallelism=1, + plugin={}, + pod_spec_patch="pod_spec_patch_example", + priority=1, + priority_class_name="priority_class_name_example", + resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( + action="action_example", + failure_condition="failure_condition_example", + flags=[ + 
"flags_example", ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( + manifest="manifest_example", + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( _none={}, tar=IoArgoprojWorkflowV1alpha1TarStrategy( @@ -24639,6 +30145,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -24652,6 +30170,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -24664,6 +30194,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -24677,6 +30208,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -24716,6 +30248,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + 
auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -24787,49 +30371,7 @@ with argo_workflows.ApiClient() as api_client: ), sub_path="sub_path_example", ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - 
parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", + ), merge_strategy="merge_strategy_example", set_owner_reference=True, success_condition="success_condition_example", @@ -25439,6 +30981,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -25452,6 +31006,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -25464,6 +31030,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -25477,6 +31044,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -25516,6 +31084,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + 
auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -25633,6 +31253,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -25646,6 +31278,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + 
endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -25658,6 +31302,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -25671,6 +31316,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -25710,6 +31356,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -26808,7 +32506,7 @@ Name | Type | Description | Notes ### Authorization -No 
authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/python/client/docs/EventServiceApi.md b/sdks/python/client/docs/EventServiceApi.md index 260580441e43..70366184e1ee 100644 --- a/sdks/python/client/docs/EventServiceApi.md +++ b/sdks/python/client/docs/EventServiceApi.md @@ -15,6 +15,7 @@ Method | HTTP request | Description ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -28,9 +29,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = event_service_api.EventServiceApi(api_client) namespace = "namespace_example" # str | @@ -82,7 +93,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -105,6 +116,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -117,9 +129,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = event_service_api.EventServiceApi(api_client) namespace = "namespace_example" # str | The namespace for the io.argoproj.workflow.v1alpha1. This can be empty if the client has cluster scoped permissions. If empty, then the event is \"broadcast\" to workflow event binding in all namespaces. @@ -149,7 +171,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/python/client/docs/EventSourceServiceApi.md b/sdks/python/client/docs/EventSourceServiceApi.md index c612d2e6ada6..133be663fc38 100644 --- a/sdks/python/client/docs/EventSourceServiceApi.md +++ b/sdks/python/client/docs/EventSourceServiceApi.md @@ -20,6 +20,7 @@ Method | HTTP request | Description ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -34,9 +35,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = event_source_service_api.EventSourceServiceApi(api_client) namespace = "namespace_example" # str | @@ -2216,7 +2227,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -2239,6 +2250,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -2251,9 +2263,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = event_source_service_api.EventSourceServiceApi(api_client) namespace = "namespace_example" # str | @@ -2303,7 +2325,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -2326,6 +2348,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -2339,9 +2362,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = event_source_service_api.EventSourceServiceApi(api_client) namespace = "namespace_example" # str | @@ -2403,7 +2436,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -2426,6 +2459,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -2439,9 +2473,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = event_source_service_api.EventSourceServiceApi(api_client) namespace = "namespace_example" # str | @@ -2469,7 +2513,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -2492,6 +2536,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -2505,9 +2550,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = event_source_service_api.EventSourceServiceApi(api_client) namespace = "namespace_example" # str | @@ -2559,7 +2614,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -2582,6 +2637,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -2596,9 +2652,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = event_source_service_api.EventSourceServiceApi(api_client) namespace = "namespace_example" # str | @@ -4781,7 +4847,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -4804,6 +4870,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -4817,9 +4884,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = event_source_service_api.EventSourceServiceApi(api_client) namespace = "namespace_example" # str | @@ -4871,7 +4948,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource.md deleted file mode 100644 index 634bdce54f24..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource.md +++ /dev/null @@ -1,40 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**aws_elastic_block_store** | [**AWSElasticBlockStoreVolumeSource**](AWSElasticBlockStoreVolumeSource.md) | | [optional] -**azure_disk** | [**AzureDiskVolumeSource**](AzureDiskVolumeSource.md) | | [optional] -**azure_file** | [**AzureFileVolumeSource**](AzureFileVolumeSource.md) | | [optional] -**cephfs** | [**CephFSVolumeSource**](CephFSVolumeSource.md) | | [optional] -**cinder** | [**CinderVolumeSource**](CinderVolumeSource.md) | | [optional] -**config_map** | [**ConfigMapVolumeSource**](ConfigMapVolumeSource.md) | | [optional] -**csi** | [**CSIVolumeSource**](CSIVolumeSource.md) | | [optional] -**downward_api** | [**DownwardAPIVolumeSource**](DownwardAPIVolumeSource.md) | | [optional] -**empty_dir** | [**EmptyDirVolumeSource**](EmptyDirVolumeSource.md) | | [optional] -**ephemeral** | [**EphemeralVolumeSource**](EphemeralVolumeSource.md) | | 
[optional] -**fc** | [**FCVolumeSource**](FCVolumeSource.md) | | [optional] -**flex_volume** | [**FlexVolumeSource**](FlexVolumeSource.md) | | [optional] -**flocker** | [**FlockerVolumeSource**](FlockerVolumeSource.md) | | [optional] -**gce_persistent_disk** | [**GCEPersistentDiskVolumeSource**](GCEPersistentDiskVolumeSource.md) | | [optional] -**git_repo** | [**GitRepoVolumeSource**](GitRepoVolumeSource.md) | | [optional] -**glusterfs** | [**GlusterfsVolumeSource**](GlusterfsVolumeSource.md) | | [optional] -**host_path** | [**HostPathVolumeSource**](HostPathVolumeSource.md) | | [optional] -**iscsi** | [**ISCSIVolumeSource**](ISCSIVolumeSource.md) | | [optional] -**nfs** | [**NFSVolumeSource**](NFSVolumeSource.md) | | [optional] -**persistent_volume_claim** | [**PersistentVolumeClaimVolumeSource**](PersistentVolumeClaimVolumeSource.md) | | [optional] -**photon_persistent_disk** | [**PhotonPersistentDiskVolumeSource**](PhotonPersistentDiskVolumeSource.md) | | [optional] -**portworx_volume** | [**PortworxVolumeSource**](PortworxVolumeSource.md) | | [optional] -**projected** | [**ProjectedVolumeSource**](ProjectedVolumeSource.md) | | [optional] -**quobyte** | [**QuobyteVolumeSource**](QuobyteVolumeSource.md) | | [optional] -**rbd** | [**RBDVolumeSource**](RBDVolumeSource.md) | | [optional] -**scale_io** | [**ScaleIOVolumeSource**](ScaleIOVolumeSource.md) | | [optional] -**secret** | [**SecretVolumeSource**](SecretVolumeSource.md) | | [optional] -**storageos** | [**StorageOSVolumeSource**](StorageOSVolumeSource.md) | | [optional] -**vsphere_volume** | [**VsphereVirtualDiskVolumeSource**](VsphereVirtualDiskVolumeSource.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - 
diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff.md deleted file mode 100644 index b06742558f91..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff.md +++ /dev/null @@ -1,16 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**factor_percentage** | **int** | | [optional] -**cap** | [**Duration**](Duration.md) | | [optional] -**duration** | [**Duration**](Duration.md) | | [optional] -**jitter_percentage** | **int** | | [optional] -**steps** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat.md deleted file mode 100644 index bfc92f019b84..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat.md +++ /dev/null @@ -1,12 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**abstract_step** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) 
[[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Code.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Code.md deleted file mode 100644 index 6440a8a842a7..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Code.md +++ /dev/null @@ -1,14 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Code - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**image** | **str** | Image is used in preference to Runtime. | [optional] -**runtime** | **str** | | [optional] -**source** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Container.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Container.md deleted file mode 100644 index 3df19e956ae3..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Container.md +++ /dev/null @@ -1,18 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Container - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**args** | **[str]** | | [optional] -**command** | **[str]** | | [optional] -**env** | [**[EnvVar]**](EnvVar.md) | | [optional] -**image** | **str** | | [optional] -**_in** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface.md) | | [optional] -**resources** | 
[**ResourceRequirements**](ResourceRequirements.md) | | [optional] -**volume_mounts** | [**[VolumeMount]**](VolumeMount.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource.md deleted file mode 100644 index 89d328a49094..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource.md +++ /dev/null @@ -1,13 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**value** | **str** | | [optional] -**value_from** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom**](GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom.md deleted file mode 100644 index 4dd7f6ddc8e6..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom.md +++ /dev/null @@ -1,12 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSourceFrom - - 
-## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**secret_key_ref** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink.md deleted file mode 100644 index 5dcf10b5c1a6..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink.md +++ /dev/null @@ -1,13 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**actions** | [**[GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction]**](GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction.md) | | [optional] -**database** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Database**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Database.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource.md deleted file mode 100644 index 27d3584c2044..000000000000 --- 
a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource.md +++ /dev/null @@ -1,17 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**commit_interval** | [**Duration**](Duration.md) | | [optional] -**database** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Database**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Database.md) | | [optional] -**init_schema** | **bool** | | [optional] -**offset_column** | **str** | | [optional] -**poll_interval** | [**Duration**](Duration.md) | | [optional] -**query** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Database.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Database.md deleted file mode 100644 index eca394299262..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Database.md +++ /dev/null @@ -1,13 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Database - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**data_source** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1DBDataSource.md) | | [optional] -**driver** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe.md deleted file mode 100644 index 7456c3c86704..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe.md +++ /dev/null @@ -1,14 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**abstract_step** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md) | | [optional] -**max_size** | **str** | Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: <quantity> ::= <signedNumber><suffix> (Note that <suffix> may be empty, from the \"\" case in <decimalSI>.) <digit> ::= 0 | 1 | ... | 9 <digits> ::= <digit> | <digit><digits> <number> ::= <digits> | <digits>.<digits> | <digits>. | .<digits> <sign> ::= \"+\" | \"-\" <signedNumber> ::= <number> | <sign><number> <suffix> ::= <binarySI> | <decimalExponent> | <decimalSI> <binarySI> ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) <decimalSI> ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) <decimalExponent> ::= \"e\" <signedNumber> | \"E\" <signedNumber> No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) 
This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation. 
| [optional] -**uid** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand.md deleted file mode 100644 index 74fa51a9e8e2..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand.md +++ /dev/null @@ -1,12 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**abstract_step** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter.md deleted file mode 100644 index a9d258f8faf3..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter.md +++ /dev/null @@ -1,13 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**abstract_step** | 
[**GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md) | | [optional] -**expression** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten.md deleted file mode 100644 index 862ab0a6961b..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten.md +++ /dev/null @@ -1,12 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**abstract_step** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Git.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Git.md deleted file mode 100644 index 7c80d2e708c1..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Git.md +++ /dev/null @@ -1,21 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Git - - -## Properties -Name | Type | Description | Notes ------------- | 
------------- | ------------- | ------------- -**branch** | **str** | | [optional] -**command** | **[str]** | | [optional] -**env** | [**[EnvVar]**](EnvVar.md) | | [optional] -**image** | **str** | | [optional] -**insecure_ignore_host_key** | **bool** | | [optional] -**password_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**path** | **str** | +kubebuilder:default=. | [optional] -**ssh_private_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**url** | **str** | | [optional] -**username_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Group.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Group.md deleted file mode 100644 index a25ce45e3c7c..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Group.md +++ /dev/null @@ -1,15 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Group - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**end_of_group** | **str** | | [optional] -**format** | **str** | | [optional] -**key** | **str** | | [optional] -**storage** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader.md deleted file mode 100644 index 14c0c4b3f664..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader.md +++ /dev/null @@ -1,14 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**name** | **str** | | [optional] -**value** | **str** | | [optional] -**value_from** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource.md deleted file mode 100644 index c2924905bc51..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource.md +++ /dev/null @@ -1,12 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeaderSource - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**secret_key_ref** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | 
[optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink.md deleted file mode 100644 index a8826820c19b..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink.md +++ /dev/null @@ -1,14 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**headers** | [**[GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader]**](GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPHeader.md) | | [optional] -**insecure_skip_verify** | **bool** | | [optional] -**url** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface.md deleted file mode 100644 index a077e82334e5..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface.md +++ /dev/null @@ -1,13 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Interface - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**fifo** | **bool** | | [optional] -**http** | **bool, date, datetime, dict, float, int, list, str, none_type** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, 
list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream.md deleted file mode 100644 index 9f7bb3206820..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream.md +++ /dev/null @@ -1,15 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**auth** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth**](GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth.md) | | [optional] -**name** | **str** | | [optional] -**nats_url** | **str** | | [optional] -**subject** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink.md deleted file mode 100644 index 71c921fa8a4e..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink.md +++ /dev/null @@ -1,12 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**jetstream** | 
[**GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream**](GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource.md deleted file mode 100644 index 008769f65787..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource.md +++ /dev/null @@ -1,12 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**jetstream** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream**](GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStream.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka.md deleted file mode 100644 index 8c34b8e53532..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka.md +++ /dev/null @@ -1,14 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | 
------------- | ------------- -**kafka_config** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig**](GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig.md) | | [optional] -**name** | **str** | | [optional] -**topic** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig.md deleted file mode 100644 index 8b8c11eeb986..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig.md +++ /dev/null @@ -1,14 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaConfig - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**brokers** | **[str]** | | [optional] -**max_message_bytes** | **int** | | [optional] -**net** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET**](GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink.md deleted file mode 100644 index 0d9ed79afb0e..000000000000 --- 
a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink.md +++ /dev/null @@ -1,20 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**acks** | **str** | | [optional] -**_async** | **bool** | | [optional] -**batch_size** | **str** | Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: <quantity> ::= <signedNumber><suffix> (Note that <suffix> may be empty, from the \"\" case in <decimalSI>.) <digit> ::= 0 | 1 | ... | 9 <digits> ::= <digit> | <digit><digits> <number> ::= <digits> | <digits>.<digits> | <digits>. | .<digits> <sign> ::= \"+\" | \"-\" <signedNumber> ::= <number> | <sign><number> <suffix> ::= <binarySI> | <decimalExponent> | <decimalSI> <binarySI> ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) <decimalSI> ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) <decimalExponent> ::= \"e\" <signedNumber> | \"E\" <signedNumber> No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. 
No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation. | [optional] -**compression_type** | **str** | | [optional] -**enable_idempotence** | **bool** | | [optional] -**kafka** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka.md) | | [optional] -**linger** | [**Duration**](Duration.md) | | [optional] -**max_inflight** | **int** | | [optional] -**message_timeout** | [**Duration**](Duration.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource.md deleted file mode 100644 index 74528423533e..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource.md +++ /dev/null @@ -1,16 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | 
------------- | ------------- -**fetch_min** | **str** | Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: <quantity> ::= <signedNumber><suffix> (Note that <suffix> may be empty, from the \"\" case in <decimalSI>.) <digit> ::= 0 | 1 | ... | 9 <digits> ::= <digit> | <digit><digits> <number> ::= <digits> | <digits>.<digits> | <digits>. | .<digits> <sign> ::= \"+\" | \"-\" <signedNumber> ::= <number> | <sign><number> <suffix> ::= <binarySI> | <decimalExponent> | <decimalSI> <binarySI> ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) <decimalSI> ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) <decimalExponent> ::= \"e\" <signedNumber> | \"E\" <signedNumber> No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. 
That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation. | [optional] -**fetch_wait_max** | [**Duration**](Duration.md) | | [optional] -**group_id** | **str** | GroupID is the consumer group ID. If not specified, a unique deterministic group ID is generated. | [optional] -**kafka** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Kafka.md) | | [optional] -**start_offset** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Log.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Log.md deleted file mode 100644 index 30ecfc043080..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Log.md +++ /dev/null @@ -1,12 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Log - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**truncate** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back 
to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Map.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Map.md deleted file mode 100644 index e60d5ba7f989..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Map.md +++ /dev/null @@ -1,13 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Map - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**abstract_step** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md) | | [optional] -**expression** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata.md deleted file mode 100644 index ed62169bee57..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata.md +++ /dev/null @@ -1,13 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**annotations** | **{str: (str,)}** | | [optional] -**labels** | **{str: (str,)}** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) 
[[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth.md deleted file mode 100644 index e11c137bb502..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth.md +++ /dev/null @@ -1,12 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline.md deleted file mode 100644 index 973dbde84b57..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline.md +++ /dev/null @@ -1,14 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**metadata** | [**ObjectMeta**](ObjectMeta.md) | | [optional] -**spec** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec**](GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec.md) | | [optional] -**status** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus**](GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value 
must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList.md deleted file mode 100644 index 218610e2ec04..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList.md +++ /dev/null @@ -1,13 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**items** | [**[GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline]**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline.md) | | [optional] -**metadata** | [**ListMeta**](ListMeta.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec.md deleted file mode 100644 index f1fd242c6628..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec.md +++ /dev/null @@ -1,13 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineSpec - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**deletion_delay** | [**Duration**](Duration.md) | | [optional] -**steps** | 
[**[GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec]**](GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus.md deleted file mode 100644 index 31312f8c9358..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus.md +++ /dev/null @@ -1,15 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineStatus - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**conditions** | [**[Condition]**](Condition.md) | | [optional] -**last_updated** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. 
| [optional] -**message** | **str** | | [optional] -**phase** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3.md deleted file mode 100644 index 311640ecb449..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3.md +++ /dev/null @@ -1,16 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1S3 - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**bucket** | **str** | | [optional] -**credentials** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials.md) | | [optional] -**endpoint** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint.md) | | [optional] -**name** | **str** | | [optional] -**region** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink.md deleted file mode 100644 index 1607eb6b46cc..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink.md 
+++ /dev/null @@ -1,12 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**s3** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1S3**](GithubComArgoprojLabsArgoDataflowApiV1alpha1S3.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source.md deleted file mode 100644 index 866cfa4ea731..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source.md +++ /dev/null @@ -1,14 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**concurrency** | **int** | | [optional] -**poll_period** | [**Duration**](Duration.md) | | [optional] -**s3** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1S3**](GithubComArgoprojLabsArgoDataflowApiV1alpha1S3.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement.md deleted file mode 100644 index 
b6e07f26f1fd..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement.md +++ /dev/null @@ -1,13 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**args** | **[str]** | | [optional] -**sql** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN.md deleted file mode 100644 index 375fb19d15e4..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN.md +++ /dev/null @@ -1,19 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**auth** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth**](GithubComArgoprojLabsArgoDataflowApiV1alpha1NATSAuth.md) | | [optional] -**cluster_id** | **str** | | [optional] -**max_inflight** | **int** | | [optional] -**name** | **str** | | [optional] -**nats_monitoring_url** | **str** | | [optional] -**nats_url** | **str** | | [optional] -**subject** | **str** | | [optional] -**subject_prefix** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to 
README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale.md deleted file mode 100644 index e3d0d3aac22e..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale.md +++ /dev/null @@ -1,14 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**desired_replicas** | **str** | An expression to determine the number of replicas. Must evaluation to an `int`. | [optional] -**peek_delay** | **str** | | [optional] -**scaling_delay** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink.md deleted file mode 100644 index 4a562980176c..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink.md +++ /dev/null @@ -1,21 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**db** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink**](GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSink.md) | | [optional] -**dead_letter_queue** | **bool** | | [optional] -**http** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink**](GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSink.md) | | [optional] -**jetstream** | 
[**GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink**](GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSink.md) | | [optional] -**kafka** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink**](GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSink.md) | | [optional] -**log** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Log**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Log.md) | | [optional] -**name** | **str** | | [optional] -**s3** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink**](GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Sink.md) | | [optional] -**stan** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN**](GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN.md) | | [optional] -**volume** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink**](GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Source.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Source.md deleted file mode 100644 index dbf1c853a7a4..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Source.md +++ /dev/null @@ -1,21 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Source - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**cron** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron.md) | | [optional] -**db** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1DBSource.md) | | 
[optional] -**http** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource.md) | | [optional] -**jetstream** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1JetStreamSource.md) | | [optional] -**kafka** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaSource.md) | | [optional] -**name** | **str** | | [optional] -**retry** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Backoff.md) | | [optional] -**s3** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source**](GithubComArgoprojLabsArgoDataflowApiV1alpha1S3Source.md) | | [optional] -**stan** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN**](GithubComArgoprojLabsArgoDataflowApiV1alpha1STAN.md) | | [optional] -**volume** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Step.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Step.md deleted file mode 100644 index 83d585bb4fdd..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Step.md +++ /dev/null @@ -1,14 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Step - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**metadata** | [**ObjectMeta**](ObjectMeta.md) | | [optional] -**spec** | 
[**GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec**](GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec.md) | | [optional] -**status** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus**](GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec.md deleted file mode 100644 index 51e19b569c0a..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec.md +++ /dev/null @@ -1,36 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1StepSpec - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**affinity** | [**Affinity**](Affinity.md) | | [optional] -**cat** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Cat.md) | | [optional] -**code** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Code**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Code.md) | | [optional] -**container** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Container**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Container.md) | | [optional] -**dedupe** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Dedupe.md) | | [optional] -**expand** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Expand.md) | | [optional] -**filter** | 
[**GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Filter.md) | | [optional] -**flatten** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Flatten.md) | | [optional] -**git** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Git**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Git.md) | | [optional] -**group** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Group**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Group.md) | | [optional] -**image_pull_secrets** | [**[LocalObjectReference]**](LocalObjectReference.md) | | [optional] -**map** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Map**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Map.md) | | [optional] -**metadata** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Metadata.md) | | [optional] -**name** | **str** | | [optional] -**node_selector** | **{str: (str,)}** | | [optional] -**replicas** | **int** | | [optional] -**restart_policy** | **str** | | [optional] -**scale** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Scale.md) | | [optional] -**service_account_name** | **str** | | [optional] -**sidecar** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar.md) | | [optional] -**sinks** | [**[GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink]**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Sink.md) | | [optional] -**sources** | [**[GithubComArgoprojLabsArgoDataflowApiV1alpha1Source]**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Source.md) | | [optional] -**terminator** | **bool** | | [optional] -**tolerations** | [**[Toleration]**](Toleration.md) | | [optional] -**volumes** | [**[Volume]**](Volume.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the 
value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus.md deleted file mode 100644 index 4e05e9038321..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus.md +++ /dev/null @@ -1,17 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1StepStatus - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**last_scaled_at** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] -**message** | **str** | | [optional] -**phase** | **str** | | [optional] -**reason** | **str** | | [optional] -**replicas** | **int** | | [optional] -**selector** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS.md deleted file mode 100644 index 3291696e2ed2..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS.md +++ /dev/null @@ -1,14 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- 
-**ca_cert_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**cert_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink.md deleted file mode 100644 index 32abc09ca388..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink.md +++ /dev/null @@ -1,12 +0,0 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSink - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**abstract_volume_source** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource.md b/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource.md deleted file mode 100644 index b8c47e4d9647..000000000000 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource.md +++ /dev/null @@ -1,15 +0,0 @@ -# 
GithubComArgoprojLabsArgoDataflowApiV1alpha1VolumeSource - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**abstract_volume_source** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource**](GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractVolumeSource.md) | | [optional] -**concurrency** | **int** | | [optional] -**poll_period** | [**Duration**](Duration.md) | | [optional] -**read_only** | **bool** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/InfoServiceApi.md b/sdks/python/client/docs/InfoServiceApi.md index 6b1180299303..9383c732955b 100644 --- a/sdks/python/client/docs/InfoServiceApi.md +++ b/sdks/python/client/docs/InfoServiceApi.md @@ -4,11 +4,89 @@ All URIs are relative to *http://localhost:2746* Method | HTTP request | Description ------------- | ------------- | ------------- +[**collect_event**](InfoServiceApi.md#collect_event) | **POST** /api/v1/tracking/event | [**get_info**](InfoServiceApi.md#get_info) | **GET** /api/v1/info | [**get_user_info**](InfoServiceApi.md#get_user_info) | **GET** /api/v1/userinfo | [**get_version**](InfoServiceApi.md#get_version) | **GET** /api/v1/version | +# **collect_event** +> bool, date, datetime, dict, float, int, list, str, none_type collect_event(body) + + + +### Example + +* Api Key Authentication (BearerToken): +```python +import time +import argo_workflows +from argo_workflows.api import info_service_api +from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from argo_workflows.model.io_argoproj_workflow_v1alpha1_collect_event_request import 
IoArgoprojWorkflowV1alpha1CollectEventRequest +from pprint import pprint +# Defining the host is optional and defaults to http://localhost:2746 +# See configuration.py for a list of all supported configuration parameters. +configuration = argo_workflows.Configuration( + host = "http://localhost:2746" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' + +# Enter a context with an instance of the API client +with argo_workflows.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = info_service_api.InfoServiceApi(api_client) + body = IoArgoprojWorkflowV1alpha1CollectEventRequest( + name="name_example", + ) # IoArgoprojWorkflowV1alpha1CollectEventRequest | + + # example passing only required values which don't have defaults set + try: + api_response = api_instance.collect_event(body) + pprint(api_response) + except argo_workflows.ApiException as e: + print("Exception when calling InfoServiceApi->collect_event: %s\n" % e) +``` + + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **body** | [**IoArgoprojWorkflowV1alpha1CollectEventRequest**](IoArgoprojWorkflowV1alpha1CollectEventRequest.md)| | + +### Return type + +**bool, date, datetime, dict, float, int, list, str, none_type** + +### Authorization + +[BearerToken](../README.md#BearerToken) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + + +### HTTP response details +| Status code | Description | Response headers | 
+|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | An unexpected error response. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + # **get_info** > IoArgoprojWorkflowV1alpha1InfoResponse get_info() @@ -16,6 +94,7 @@ Method | HTTP request | Description ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -29,9 +108,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = info_service_api.InfoServiceApi(api_client) @@ -53,7 +142,7 @@ This endpoint does not need any parameter. ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -76,6 +165,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -89,9 +179,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. 
+# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = info_service_api.InfoServiceApi(api_client) @@ -113,7 +213,7 @@ This endpoint does not need any parameter. ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -136,6 +236,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -149,9 +250,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = info_service_api.InfoServiceApi(api_client) @@ -173,7 +284,7 @@ This endpoint does not need any parameter. 
### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtGCStatus.md similarity index 50% rename from sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction.md rename to sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtGCStatus.md index c4ce2a6856d5..35ac7533d9ae 100644 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtGCStatus.md @@ -1,12 +1,13 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLAction +# IoArgoprojWorkflowV1alpha1ArtGCStatus +ArtGCStatus maintains state related to ArtifactGC ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**on_error** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement**](GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement.md) | | [optional] -**on_record_not_found** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement**](GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement.md) | | [optional] -**statement** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement**](GithubComArgoprojLabsArgoDataflowApiV1alpha1SQLStatement.md) | | [optional] +**not_specified** | **bool** | if this is true, we already checked to see if we need to do it and we don't | [optional] +**pods_recouped** | **{str: (bool,)}** | have completed Pods been processed? (mapped by Pod name) used to prevent re-processing the Status of a Pod more than once | [optional] +**strategies_processed** | **{str: (bool,)}** | have Pods been started to perform this strategy? 
(enables us not to re-process what we've already done) | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Artifact.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Artifact.md index a4c3eb43c937..84af08865d9e 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Artifact.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Artifact.md @@ -8,7 +8,10 @@ Name | Type | Description | Notes **name** | **str** | name of the artifact. must be unique within a template's inputs/outputs. | **archive** | [**IoArgoprojWorkflowV1alpha1ArchiveStrategy**](IoArgoprojWorkflowV1alpha1ArchiveStrategy.md) | | [optional] **archive_logs** | **bool** | ArchiveLogs indicates if the container logs should be archived | [optional] +**artifact_gc** | [**IoArgoprojWorkflowV1alpha1ArtifactGC**](IoArgoprojWorkflowV1alpha1ArtifactGC.md) | | [optional] **artifactory** | [**IoArgoprojWorkflowV1alpha1ArtifactoryArtifact**](IoArgoprojWorkflowV1alpha1ArtifactoryArtifact.md) | | [optional] +**azure** | [**IoArgoprojWorkflowV1alpha1AzureArtifact**](IoArgoprojWorkflowV1alpha1AzureArtifact.md) | | [optional] +**deleted** | **bool** | Has this been deleted? 
| [optional] **_from** | **str** | From allows an artifact to reference an artifact from a previous step | [optional] **from_expression** | **str** | FromExpression, if defined, is evaluated to specify the value for the artifact | [optional] **gcs** | [**IoArgoprojWorkflowV1alpha1GCSArtifact**](IoArgoprojWorkflowV1alpha1GCSArtifact.md) | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGC.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGC.md new file mode 100644 index 000000000000..5964b3a8e4ff --- /dev/null +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGC.md @@ -0,0 +1,15 @@ +# IoArgoprojWorkflowV1alpha1ArtifactGC + +ArtifactGC describes how to delete artifacts from completed Workflows + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**pod_metadata** | [**IoArgoprojWorkflowV1alpha1Metadata**](IoArgoprojWorkflowV1alpha1Metadata.md) | | [optional] +**service_account_name** | **str** | ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion | [optional] +**strategy** | **str** | Strategy is the strategy to use. 
| [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCSpec.md similarity index 57% rename from sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md rename to sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCSpec.md index ae62cdb8d20f..f6a6a22eb920 100644 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCSpec.md @@ -1,10 +1,11 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1AbstractStep +# IoArgoprojWorkflowV1alpha1ArtifactGCSpec +ArtifactGCSpec specifies the Artifacts that need to be deleted ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**resources** | [**ResourceRequirements**](ResourceRequirements.md) | | [optional] +**artifacts_by_node** | [**{str: (IoArgoprojWorkflowV1alpha1ArtifactNodeSpec,)}**](IoArgoprojWorkflowV1alpha1ArtifactNodeSpec.md) | ArtifactsByNode maps Node name to information pertaining to Artifacts on that Node | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint.md 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCStatus.md similarity index 58% rename from sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint.md rename to sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCStatus.md index 772f944dde3d..05cedb1701e4 100644 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCStatus.md @@ -1,10 +1,11 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSEndpoint +# IoArgoprojWorkflowV1alpha1ArtifactGCStatus +ArtifactGCStatus describes the result of the deletion ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**url** | **str** | | [optional] +**artifact_results_by_node** | [**{str: (IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus,)}**](IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus.md) | ArtifactResultsByNode maps Node name to result | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactLocation.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactLocation.md index 8e4b6f9911bc..e2e4949deb82 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactLocation.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactLocation.md @@ -7,6 +7,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **archive_logs** | **bool** | ArchiveLogs indicates if the container logs should be archived | [optional] **artifactory** | 
[**IoArgoprojWorkflowV1alpha1ArtifactoryArtifact**](IoArgoprojWorkflowV1alpha1ArtifactoryArtifact.md) | | [optional] +**azure** | [**IoArgoprojWorkflowV1alpha1AzureArtifact**](IoArgoprojWorkflowV1alpha1AzureArtifact.md) | | [optional] **gcs** | [**IoArgoprojWorkflowV1alpha1GCSArtifact**](IoArgoprojWorkflowV1alpha1GCSArtifact.md) | | [optional] **git** | [**IoArgoprojWorkflowV1alpha1GitArtifact**](IoArgoprojWorkflowV1alpha1GitArtifact.md) | | [optional] **hdfs** | [**IoArgoprojWorkflowV1alpha1HDFSArtifact**](IoArgoprojWorkflowV1alpha1HDFSArtifact.md) | | [optional] diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactNodeSpec.md similarity index 51% rename from sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET.md rename to sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactNodeSpec.md index c1b32a607a35..4217c51c7e00 100644 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactNodeSpec.md @@ -1,11 +1,12 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1KafkaNET +# IoArgoprojWorkflowV1alpha1ArtifactNodeSpec +ArtifactNodeSpec specifies the Artifacts that need to be deleted for a given Node ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**sasl** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL**](GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL.md) | | [optional] -**tls** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS**](GithubComArgoprojLabsArgoDataflowApiV1alpha1TLS.md) | | [optional] +**archive_location** | [**IoArgoprojWorkflowV1alpha1ArtifactLocation**](IoArgoprojWorkflowV1alpha1ArtifactLocation.md) | | [optional] +**artifacts** | [**{str: (IoArgoprojWorkflowV1alpha1Artifact,)}**](IoArgoprojWorkflowV1alpha1Artifact.md) | Artifacts maps artifact name to Artifact 
description | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactPaths.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactPaths.md index 1b0685d0e47d..25fe6ad26855 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactPaths.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactPaths.md @@ -8,7 +8,10 @@ Name | Type | Description | Notes **name** | **str** | name of the artifact. must be unique within a template's inputs/outputs. | **archive** | [**IoArgoprojWorkflowV1alpha1ArchiveStrategy**](IoArgoprojWorkflowV1alpha1ArchiveStrategy.md) | | [optional] **archive_logs** | **bool** | ArchiveLogs indicates if the container logs should be archived | [optional] +**artifact_gc** | [**IoArgoprojWorkflowV1alpha1ArtifactGC**](IoArgoprojWorkflowV1alpha1ArtifactGC.md) | | [optional] **artifactory** | [**IoArgoprojWorkflowV1alpha1ArtifactoryArtifact**](IoArgoprojWorkflowV1alpha1ArtifactoryArtifact.md) | | [optional] +**azure** | [**IoArgoprojWorkflowV1alpha1AzureArtifact**](IoArgoprojWorkflowV1alpha1AzureArtifact.md) | | [optional] +**deleted** | **bool** | Has this been deleted? 
| [optional] **_from** | **str** | From allows an artifact to reference an artifact from a previous step | [optional] **from_expression** | **str** | FromExpression, if defined, is evaluated to specify the value for the artifact | [optional] **gcs** | [**IoArgoprojWorkflowV1alpha1GCSArtifact**](IoArgoprojWorkflowV1alpha1GCSArtifact.md) | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepository.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepository.md index aa532cc0b11e..c5db3c661979 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepository.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepository.md @@ -7,6 +7,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **archive_logs** | **bool** | ArchiveLogs enables log archiving | [optional] **artifactory** | [**IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository**](IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository.md) | | [optional] +**azure** | [**IoArgoprojWorkflowV1alpha1AzureArtifactRepository**](IoArgoprojWorkflowV1alpha1AzureArtifactRepository.md) | | [optional] **gcs** | [**IoArgoprojWorkflowV1alpha1GCSArtifactRepository**](IoArgoprojWorkflowV1alpha1GCSArtifactRepository.md) | | [optional] **hdfs** | [**IoArgoprojWorkflowV1alpha1HDFSArtifactRepository**](IoArgoprojWorkflowV1alpha1HDFSArtifactRepository.md) | | [optional] **oss** | [**IoArgoprojWorkflowV1alpha1OSSArtifactRepository**](IoArgoprojWorkflowV1alpha1OSSArtifactRepository.md) | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResult.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResult.md new file mode 100644 index 000000000000..dad97f7d325f --- /dev/null +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResult.md @@ -0,0 +1,15 @@ +# IoArgoprojWorkflowV1alpha1ArtifactResult + +ArtifactResult describes the result of attempting to 
delete a given Artifact + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name is the name of the Artifact | +**error** | **str** | Error is an optional error message which should be set if Success==false | [optional] +**success** | **bool** | Success describes whether the deletion succeeded | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus.md similarity index 58% rename from sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL.md rename to sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus.md index 67c0670849cd..8677a87b5933 100644 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus.md @@ -1,12 +1,11 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1SASL +# IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus +ArtifactResultNodeStatus describes the result of the deletion on a given node ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**mechanism** | **str** | | [optional] -**password** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**user** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] +**artifact_results** | [**{str: (IoArgoprojWorkflowV1alpha1ArtifactResult,)}**](IoArgoprojWorkflowV1alpha1ArtifactResult.md) | ArtifactResults maps Artifact name to result of the deletion | 
[optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifact.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifact.md new file mode 100644 index 000000000000..68e7087d0aa3 --- /dev/null +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifact.md @@ -0,0 +1,17 @@ +# IoArgoprojWorkflowV1alpha1AzureArtifact + +AzureArtifact is the location of a an Azure Storage artifact + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**blob** | **str** | Blob is the blob name (i.e., path) in the container where the artifact resides | +**container** | **str** | Container is the container where resources will be stored | +**endpoint** | **str** | Endpoint is the service url associated with an account. It is most likely \"https://<ACCOUNT_NAME>.blob.core.windows.net\" | +**account_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] +**use_sdk_creds** | **bool** | UseSDKCreds tells the driver to figure out credentials based on sdk defaults. 
| [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifactRepository.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifactRepository.md new file mode 100644 index 000000000000..2becb466ebb5 --- /dev/null +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifactRepository.md @@ -0,0 +1,17 @@ +# IoArgoprojWorkflowV1alpha1AzureArtifactRepository + +AzureArtifactRepository defines the controller configuration for an Azure Blob Storage artifact repository + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**container** | **str** | Container is the container where resources will be stored | +**endpoint** | **str** | Endpoint is the service url associated with an account. It is most likely \"https://<ACCOUNT_NAME>.blob.core.windows.net\" | +**account_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] +**blob_name_format** | **str** | BlobNameFormat is defines the format of how to store blob names. Can reference workflow variables | [optional] +**use_sdk_creds** | **bool** | UseSDKCreds tells the driver to figure out credentials based on sdk defaults. 
| [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1BasicAuth.md similarity index 64% rename from sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar.md rename to sdks/python/client/docs/IoArgoprojWorkflowV1alpha1BasicAuth.md index f2233f3af664..52326a531986 100644 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1BasicAuth.md @@ -1,10 +1,12 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Sidecar +# IoArgoprojWorkflowV1alpha1BasicAuth +BasicAuth describes the secret selectors required for basic authentication ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**resources** | [**ResourceRequirements**](ResourceRequirements.md) | | [optional] +**password_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] +**username_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClientCertAuth.md similarity index 61% rename from 
sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials.md rename to sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClientCertAuth.md index 468e452e3027..b2e389e51983 100644 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClientCertAuth.md @@ -1,12 +1,12 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1AWSCredentials +# IoArgoprojWorkflowV1alpha1ClientCertAuth +ClientCertAuth holds necessary information for client authentication via certificates ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**access_key_id** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**secret_access_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**session_token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] +**client_cert_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] +**client_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CollectEventRequest.md similarity index 83% rename from sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage.md rename to sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CollectEventRequest.md index dec83082a80f..5595664f1a1a 100644 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage.md +++ 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CollectEventRequest.md @@ -1,11 +1,10 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Storage +# IoArgoprojWorkflowV1alpha1CollectEventRequest ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | | [optional] -**sub_path** | **str** | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContainerNode.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContainerNode.md index d28a57fa2a66..4a414a1de45c 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContainerNode.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContainerNode.md @@ -5,12 +5,12 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated. | -**args** | **[str]** | Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] -**command** | **[str]** | Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**args** | **[str]** | Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**command** | **[str]** | Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. 
\"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] **dependencies** | **[str]** | | [optional] **env** | [**[EnvVar]**](EnvVar.md) | List of environment variables to set in the container. Cannot be updated. | [optional] **env_from** | [**[EnvFromSource]**](EnvFromSource.md) | List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. | [optional] -**image** | **str** | Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. | [optional] +**image** | **str** | Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. | [optional] **image_pull_policy** | **str** | Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. 
More info: https://kubernetes.io/docs/concepts/containers/images#updating-images | [optional] **lifecycle** | [**Lifecycle**](Lifecycle.md) | | [optional] **liveness_probe** | [**Probe**](Probe.md) | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GetUserInfoResponse.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GetUserInfoResponse.md index 14dd15c103e4..36d566411b0f 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GetUserInfoResponse.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GetUserInfoResponse.md @@ -9,6 +9,7 @@ Name | Type | Description | Notes **groups** | **[str]** | | [optional] **issuer** | **str** | | [optional] **service_account_name** | **str** | | [optional] +**service_account_namespace** | **str** | | [optional] **subject** | **str** | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GitArtifact.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GitArtifact.md index 8ee36decfc2f..b2c81b02c31a 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GitArtifact.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GitArtifact.md @@ -6,12 +6,14 @@ GitArtifact is the location of an git artifact Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **repo** | **str** | Repo is the git repository | +**branch** | **str** | Branch is the branch to fetch when `SingleBranch` is enabled | [optional] **depth** | **int** | Depth specifies clones/fetches should be shallow and include the given number of commits from the branch tip | [optional] **disable_submodules** | **bool** | DisableSubmodules disables submodules during git clone | [optional] **fetch** | **[str]** | Fetch specifies a number of refs that should be fetched before checkout | [optional] 
**insecure_ignore_host_key** | **bool** | InsecureIgnoreHostKey disables SSH strict host key checking during git clone | [optional] **password_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **revision** | **str** | Revision is the git commit, tag, branch to checkout | [optional] +**single_branch** | **bool** | SingleBranch enables single branch clone, using the `branch` parameter | [optional] **ssh_private_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **username_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTP.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTP.md index 72b855a48b86..da15caee5242 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTP.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTP.md @@ -6,8 +6,9 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **url** | **str** | URL of the HTTP Request | **body** | **str** | Body is content of the HTTP Request | [optional] +**body_from** | [**IoArgoprojWorkflowV1alpha1HTTPBodySource**](IoArgoprojWorkflowV1alpha1HTTPBodySource.md) | | [optional] **headers** | [**[IoArgoprojWorkflowV1alpha1HTTPHeader]**](IoArgoprojWorkflowV1alpha1HTTPHeader.md) | Headers are an optional list of headers to send with HTTP requests | [optional] -**insecure_skip_verify** | **bool** | insecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client | [optional] +**insecure_skip_verify** | **bool** | InsecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client | [optional] **method** | **str** | Method is HTTP methods for HTTP Request | [optional] **success_condition** | **str** 
| SuccessCondition is an expression if evaluated to true is considered successful | [optional] **timeout_seconds** | **int** | TimeoutSeconds is request timeout for HTTP Request. Default is 30 seconds | [optional] diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPArtifact.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPArtifact.md index 3142486ce613..a9f98eccaadc 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPArtifact.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPArtifact.md @@ -1,11 +1,12 @@ # IoArgoprojWorkflowV1alpha1HTTPArtifact -HTTPArtifact allows an file served on HTTP to be placed as an input artifact in a container +HTTPArtifact allows a file served on HTTP to be placed as an input artifact in a container ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **url** | **str** | URL of the artifact | +**auth** | [**IoArgoprojWorkflowV1alpha1HTTPAuth**](IoArgoprojWorkflowV1alpha1HTTPAuth.md) | | [optional] **headers** | [**[IoArgoprojWorkflowV1alpha1Header]**](IoArgoprojWorkflowV1alpha1Header.md) | Headers are an optional list of headers to send with HTTP requests for artifacts | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPAuth.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPAuth.md new file mode 100644 index 000000000000..d6132bfbd9b4 --- /dev/null +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPAuth.md @@ -0,0 +1,14 @@ +# IoArgoprojWorkflowV1alpha1HTTPAuth + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**basic_auth** | [**IoArgoprojWorkflowV1alpha1BasicAuth**](IoArgoprojWorkflowV1alpha1BasicAuth.md) | | [optional] +**client_cert** | 
[**IoArgoprojWorkflowV1alpha1ClientCertAuth**](IoArgoprojWorkflowV1alpha1ClientCertAuth.md) | | [optional] +**oauth2** | [**IoArgoprojWorkflowV1alpha1OAuth2Auth**](IoArgoprojWorkflowV1alpha1OAuth2Auth.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPBodySource.md similarity index 76% rename from sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource.md rename to sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPBodySource.md index 50f13914d48f..39f71bfa00ef 100644 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPBodySource.md @@ -1,10 +1,11 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1HTTPSource +# IoArgoprojWorkflowV1alpha1HTTPBodySource +HTTPBodySource contains the source of the HTTP body. 
## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**service_name** | **str** | | [optional] +**bytes** | **str** | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LifecycleHook.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LifecycleHook.md index 283ca30e77fd..b269599dd23b 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LifecycleHook.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LifecycleHook.md @@ -4,9 +4,9 @@ ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**template** | **str** | Template is the name of the template to execute by the hook | **arguments** | [**IoArgoprojWorkflowV1alpha1Arguments**](IoArgoprojWorkflowV1alpha1Arguments.md) | | [optional] **expression** | **str** | Expression is a condition expression for when a node will be retried. 
If it evaluates to false, the node will not be retried and the retry strategy will be ignored | [optional] +**template** | **str** | Template is the name of the template to execute by the hook | [optional] **template_ref** | [**IoArgoprojWorkflowV1alpha1TemplateRef**](IoArgoprojWorkflowV1alpha1TemplateRef.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ManifestFrom.md similarity index 75% rename from sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron.md rename to sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ManifestFrom.md index e085e8b6e17b..8fbb31c80b6c 100644 --- a/sdks/python/client/docs/GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ManifestFrom.md @@ -1,11 +1,10 @@ -# GithubComArgoprojLabsArgoDataflowApiV1alpha1Cron +# IoArgoprojWorkflowV1alpha1ManifestFrom ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**layout** | **str** | | [optional] -**schedule** | **str** | | [optional] +**artifact** | [**IoArgoprojWorkflowV1alpha1Artifact**](IoArgoprojWorkflowV1alpha1Artifact.md) | | **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OAuth2Auth.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OAuth2Auth.md new file mode 100644 index 000000000000..094f934aeb28 --- /dev/null +++ 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OAuth2Auth.md @@ -0,0 +1,17 @@ +# IoArgoprojWorkflowV1alpha1OAuth2Auth + +OAuth2Auth holds all information for client authentication via OAuth2 tokens + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**client_id_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] +**client_secret_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] +**endpoint_params** | [**[IoArgoprojWorkflowV1alpha1OAuth2EndpointParam]**](IoArgoprojWorkflowV1alpha1OAuth2EndpointParam.md) | | [optional] +**scopes** | **[str]** | | [optional] +**token_url_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/Duration.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OAuth2EndpointParam.md similarity index 62% rename from sdks/python/client/docs/Duration.md rename to sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OAuth2EndpointParam.md index 599dd08ebc7f..8b30d7b2619e 100644 --- a/sdks/python/client/docs/Duration.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OAuth2EndpointParam.md @@ -1,11 +1,12 @@ -# Duration +# IoArgoprojWorkflowV1alpha1OAuth2EndpointParam -Duration is a wrapper around time.Duration which supports correct marshaling to YAML and JSON. In particular, it marshals into strings, which can be used as map keys in json. 
+EndpointParam is for requesting optional fields that should be sent in the oauth request ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**duration** | **str** | | [optional] +**key** | **str** | Name is the header name | +**value** | **str** | Value is the literal value to use for the header | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ResourceTemplate.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ResourceTemplate.md index a55f4a0ff55e..0d6badfbf855 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ResourceTemplate.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ResourceTemplate.md @@ -9,6 +9,7 @@ Name | Type | Description | Notes **failure_condition** | **str** | FailureCondition is a label selector expression which describes the conditions of the k8s resource in which the step was considered failed | [optional] **flags** | **[str]** | Flags is a set of additional options passed to kubectl before submitting a resource I.e. to disable resource validation: flags: [ \"--validate=false\" # disable resource validation ] | [optional] **manifest** | **str** | Manifest contains the kubernetes manifest | [optional] +**manifest_from** | [**IoArgoprojWorkflowV1alpha1ManifestFrom**](IoArgoprojWorkflowV1alpha1ManifestFrom.md) | | [optional] **merge_strategy** | **str** | MergeStrategy is the strategy used to merge a patch. 
It defaults to \"strategic\" Must be one of: strategic, merge, json | [optional] **set_owner_reference** | **bool** | SetOwnerReference sets the reference to the workflow on the OwnerReference of generated resource. | [optional] **success_condition** | **str** | SuccessCondition is a label selector expression which describes the conditions of the k8s resource in which it is acceptable to proceed to the following step | [optional] diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest.md index d470821b1fce..f27f935a3f4c 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest.md @@ -7,6 +7,7 @@ Name | Type | Description | Notes **memoized** | **bool** | | [optional] **name** | **str** | | [optional] **namespace** | **str** | | [optional] +**parameters** | **[str]** | | [optional] **uid** | **str** | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest.md index 18ed50d3f9f2..cdbf4e08e85f 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest.md @@ -7,6 +7,7 @@ Name | Type | Description | Notes **name** | **str** | | [optional] **namespace** | **str** | | [optional] **node_field_selector** | **str** | | [optional] +**parameters** | **[str]** | | [optional] **restart_successful** | **bool** | | [optional] **uid** | **str** | | [optional] **any string name** | **bool, date, datetime, dict, float, 
int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ScriptTemplate.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ScriptTemplate.md index c8be93c63212..8d564f3609c8 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ScriptTemplate.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ScriptTemplate.md @@ -5,10 +5,10 @@ ScriptTemplate is a template subtype to enable scripting through code steps ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**image** | **str** | Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. | +**image** | **str** | Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. | **source** | **str** | Source contains the source code of the script to execute | -**args** | **[str]** | Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] -**command** | **[str]** | Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**args** | **[str]** | Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**command** | **[str]** | Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. 
\"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] **env** | [**[EnvVar]**](EnvVar.md) | List of environment variables to set in the container. Cannot be updated. | [optional] **env_from** | [**[EnvFromSource]**](EnvFromSource.md) | List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. | [optional] **image_pull_policy** | **str** | Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images | [optional] diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1UserContainer.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1UserContainer.md index 3080a63e5a4a..9ca82415c8fe 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1UserContainer.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1UserContainer.md @@ -6,11 +6,11 @@ UserContainer is a container specified by a user. Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated. | -**args** | **[str]** | Arguments to the entrypoint. The docker image's CMD is used if this is not provided. 
Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] -**command** | **[str]** | Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**args** | **[str]** | Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**command** | **[str]** | Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] **env** | [**[EnvVar]**](EnvVar.md) | List of environment variables to set in the container. Cannot be updated. | [optional] **env_from** | [**[EnvFromSource]**](EnvFromSource.md) | List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. | [optional] -**image** | **str** | Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. | [optional] +**image** | **str** | Container image name. 
More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. | [optional] **image_pull_policy** | **str** | Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images | [optional] **lifecycle** | [**Lifecycle**](Lifecycle.md) | | [optional] **liveness_probe** | [**Probe**](Probe.md) | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest.md index 6eae6e2c81d1..fc1293c91ae8 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest.md @@ -7,6 +7,7 @@ Name | Type | Description | Notes **memoized** | **bool** | | [optional] **name** | **str** | | [optional] **namespace** | **str** | | [optional] +**parameters** | **[str]** | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowRetryRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowRetryRequest.md index 3cfaf09b5d3d..11069d64d1b8 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowRetryRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowRetryRequest.md @@ -7,6 +7,7 @@ Name | Type | Description | Notes **name** | **str** | | [optional] 
**namespace** | **str** | | [optional] **node_field_selector** | **str** | | [optional] +**parameters** | **[str]** | | [optional] **restart_successful** | **bool** | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSpec.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSpec.md index 825ff48b3201..4c7208c35ec0 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSpec.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSpec.md @@ -9,6 +9,7 @@ Name | Type | Description | Notes **affinity** | [**Affinity**](Affinity.md) | | [optional] **archive_logs** | **bool** | ArchiveLogs indicates if the container logs should be archived | [optional] **arguments** | [**IoArgoprojWorkflowV1alpha1Arguments**](IoArgoprojWorkflowV1alpha1Arguments.md) | | [optional] +**artifact_gc** | [**IoArgoprojWorkflowV1alpha1ArtifactGC**](IoArgoprojWorkflowV1alpha1ArtifactGC.md) | | [optional] **artifact_repository_ref** | [**IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef**](IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef.md) | | [optional] **automount_service_account_token** | **bool** | AutomountServiceAccountToken indicates whether a service account token should be automatically mounted in pods. ServiceAccountName of ExecutorConfig must be specified if this value is false. 
| [optional] **dns_config** | [**PodDNSConfig**](PodDNSConfig.md) | | [optional] @@ -26,7 +27,7 @@ Name | Type | Description | Notes **pod_disruption_budget** | [**IoK8sApiPolicyV1beta1PodDisruptionBudgetSpec**](IoK8sApiPolicyV1beta1PodDisruptionBudgetSpec.md) | | [optional] **pod_gc** | [**IoArgoprojWorkflowV1alpha1PodGC**](IoArgoprojWorkflowV1alpha1PodGC.md) | | [optional] **pod_metadata** | [**IoArgoprojWorkflowV1alpha1Metadata**](IoArgoprojWorkflowV1alpha1Metadata.md) | | [optional] -**pod_priority** | **int** | Priority to apply to workflow pods. | [optional] +**pod_priority** | **int** | Priority to apply to workflow pods. DEPRECATED: Use PodPriorityClassName instead. | [optional] **pod_priority_class_name** | **str** | PriorityClassName to apply to workflow pods. | [optional] **pod_spec_patch** | **str** | PodSpecPatch holds strategic merge patch to apply against the pod spec. Allows parameterization of container fields which are not strings (e.g. resource limits). | [optional] **priority** | **int** | Priority is used if controller is configured to process limited number of workflows in parallel. Workflows with higher priority are processed first. 
| [optional] diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md index e71e594f2f9e..3d6c50ccff76 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md @@ -5,6 +5,7 @@ WorkflowStatus contains overall status information about a workflow ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- +**artifact_gc_status** | [**IoArgoprojWorkflowV1alpha1ArtGCStatus**](IoArgoprojWorkflowV1alpha1ArtGCStatus.md) | | [optional] **artifact_repository_ref** | [**IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus**](IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus.md) | | [optional] **compressed_nodes** | **str** | Compressed and base64 decoded Nodes map | [optional] **conditions** | [**[IoArgoprojWorkflowV1alpha1Condition]**](IoArgoprojWorkflowV1alpha1Condition.md) | Conditions is a list of conditions the Workflow may have | [optional] diff --git a/sdks/python/client/docs/PipelineLogEntry.md b/sdks/python/client/docs/PipelineLogEntry.md deleted file mode 100644 index c5d2393a618d..000000000000 --- a/sdks/python/client/docs/PipelineLogEntry.md +++ /dev/null @@ -1,16 +0,0 @@ -# PipelineLogEntry - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**msg** | **str** | | [optional] -**namespace** | **str** | | [optional] -**pipeline_name** | **str** | | [optional] -**step_name** | **str** | | [optional] -**time** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/PipelinePipelineWatchEvent.md b/sdks/python/client/docs/PipelinePipelineWatchEvent.md deleted file mode 100644 index e64c0a5e8b18..000000000000 --- a/sdks/python/client/docs/PipelinePipelineWatchEvent.md +++ /dev/null @@ -1,13 +0,0 @@ -# PipelinePipelineWatchEvent - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**object** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline.md) | | [optional] -**type** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/PipelineServiceApi.md b/sdks/python/client/docs/PipelineServiceApi.md deleted file mode 100644 index 7d3e4e382e83..000000000000 --- a/sdks/python/client/docs/PipelineServiceApi.md +++ /dev/null @@ -1,611 +0,0 @@ -# argo_workflows.PipelineServiceApi - -All URIs are relative to *http://localhost:2746* - -Method | HTTP request | Description -------------- | ------------- | ------------- -[**delete_pipeline**](PipelineServiceApi.md#delete_pipeline) | **DELETE** /api/v1/pipelines/{namespace}/{name} | -[**get_pipeline**](PipelineServiceApi.md#get_pipeline) | **GET** /api/v1/pipelines/{namespace}/{name} | -[**list_pipelines**](PipelineServiceApi.md#list_pipelines) | 
**GET** /api/v1/pipelines/{namespace} | -[**pipeline_logs**](PipelineServiceApi.md#pipeline_logs) | **GET** /api/v1/stream/pipelines/{namespace}/logs | -[**restart_pipeline**](PipelineServiceApi.md#restart_pipeline) | **POST** /api/v1/pipelines/{namespace}/{name}/restart | -[**watch_pipelines**](PipelineServiceApi.md#watch_pipelines) | **GET** /api/v1/stream/pipelines/{namespace} | -[**watch_steps**](PipelineServiceApi.md#watch_steps) | **GET** /api/v1/stream/steps/{namespace} | - - -# **delete_pipeline** -> bool, date, datetime, dict, float, int, list, str, none_type delete_pipeline(namespace, name) - - - -### Example - -```python -import time -import argo_workflows -from argo_workflows.api import pipeline_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from pprint import pprint -# Defining the host is optional and defaults to http://localhost:2746 -# See configuration.py for a list of all supported configuration parameters. -configuration = argo_workflows.Configuration( - host = "http://localhost:2746" -) - - -# Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: - # Create an instance of the API class - api_instance = pipeline_service_api.PipelineServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - delete_options_grace_period_seconds = "deleteOptions.gracePeriodSeconds_example" # str | The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. (optional) - delete_options_preconditions_uid = "deleteOptions.preconditions.uid_example" # str | Specifies the target UID. +optional. 
(optional) - delete_options_preconditions_resource_version = "deleteOptions.preconditions.resourceVersion_example" # str | Specifies the target ResourceVersion +optional. (optional) - delete_options_orphan_dependents = True # bool | Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. (optional) - delete_options_propagation_policy = "deleteOptions.propagationPolicy_example" # str | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. (optional) - delete_options_dry_run = [ - "deleteOptions.dryRun_example", - ] # [str] | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. 
(optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.delete_pipeline(namespace, name) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling PipelineServiceApi->delete_pipeline: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values - try: - api_response = api_instance.delete_pipeline(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling PipelineServiceApi->delete_pipeline: %s\n" % e) -``` - - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **delete_options_grace_period_seconds** | **str**| The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. | [optional] - **delete_options_preconditions_uid** | **str**| Specifies the target UID. +optional. | [optional] - **delete_options_preconditions_resource_version** | **str**| Specifies the target ResourceVersion +optional. | [optional] - **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. 
If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] - **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] - **delete_options_dry_run** | **[str]**| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. | [optional] - -### Return type - -**bool, date, datetime, dict, float, int, list, str, none_type** - -### Authorization - -No authorization required - -### HTTP request headers - - - **Content-Type**: Not defined - - **Accept**: application/json - - -### HTTP response details -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**200** | A successful response. | - | -**0** | An unexpected error response. 
| - | - -[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) - -# **get_pipeline** -> GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline get_pipeline(namespace, name) - - - -### Example - -```python -import time -import argo_workflows -from argo_workflows.api import pipeline_service_api -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline import GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from pprint import pprint -# Defining the host is optional and defaults to http://localhost:2746 -# See configuration.py for a list of all supported configuration parameters. -configuration = argo_workflows.Configuration( - host = "http://localhost:2746" -) - - -# Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: - # Create an instance of the API class - api_instance = pipeline_service_api.PipelineServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - get_options_resource_version = "getOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional (optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.get_pipeline(namespace, name) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling PipelineServiceApi->get_pipeline: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values - try: - api_response = api_instance.get_pipeline(namespace, name, get_options_resource_version=get_options_resource_version) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling PipelineServiceApi->get_pipeline: %s\n" % e) -``` - - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **get_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - -### Return type - -[**GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Pipeline.md) - -### Authorization - -No authorization required - -### HTTP request headers - - - **Content-Type**: Not defined - - **Accept**: application/json - - -### HTTP response details -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**200** | A successful response. | - | -**0** | An unexpected error response. 
| - | - -[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) - -# **list_pipelines** -> GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList list_pipelines(namespace) - - - -### Example - -```python -import time -import argo_workflows -from argo_workflows.api import pipeline_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.github_com_argoproj_labs_argo_dataflow_api_v1alpha1_pipeline_list import GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList -from pprint import pprint -# Defining the host is optional and defaults to http://localhost:2746 -# See configuration.py for a list of all supported configuration parameters. -configuration = argo_workflows.Configuration( - host = "http://localhost:2746" -) - - -# Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: - # Create an instance of the API class - api_instance = pipeline_service_api.PipelineServiceApi(api_client) - namespace = "namespace_example" # str | - list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) - list_options_field_selector = "listOptions.fieldSelector_example" # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) - list_options_watch = True # bool | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. (optional) - list_options_allow_watch_bookmarks = True # bool | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. 
Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. (optional) - list_options_resource_version = "listOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_resource_version_match = "listOptions.resourceVersionMatch_example" # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_timeout_seconds = "listOptions.timeoutSeconds_example" # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) - list_options_limit = "listOptions.limit_example" # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) - list_options_continue = "listOptions.continue_example" # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
(optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.list_pipelines(namespace) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling PipelineServiceApi->list_pipelines: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values - try: - api_response = api_instance.list_pipelines(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling PipelineServiceApi->list_pipelines: %s\n" % e) -``` - - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] - **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] - **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". 
Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] - **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
| [optional] - -### Return type - -[**GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList**](GithubComArgoprojLabsArgoDataflowApiV1alpha1PipelineList.md) - -### Authorization - -No authorization required - -### HTTP request headers - - - **Content-Type**: Not defined - - **Accept**: application/json - - -### HTTP response details -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**200** | A successful response. | - | -**0** | An unexpected error response. | - | - -[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) - -# **pipeline_logs** -> StreamResultOfPipelineLogEntry pipeline_logs(namespace) - - - -### Example - -```python -import time -import argo_workflows -from argo_workflows.api import pipeline_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.stream_result_of_pipeline_log_entry import StreamResultOfPipelineLogEntry -from pprint import pprint -# Defining the host is optional and defaults to http://localhost:2746 -# See configuration.py for a list of all supported configuration parameters. -configuration = argo_workflows.Configuration( - host = "http://localhost:2746" -) - - -# Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: - # Create an instance of the API class - api_instance = pipeline_service_api.PipelineServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | optional - only return entries for this pipeline. (optional) - step_name = "stepName_example" # str | optional - only return entries for this step. (optional) - grep = "grep_example" # str | optional - only return entries which match this expresssion. 
(optional) - pod_log_options_container = "podLogOptions.container_example" # str | The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. (optional) - pod_log_options_follow = True # bool | Follow the log stream of the pod. Defaults to false. +optional. (optional) - pod_log_options_previous = True # bool | Return previous terminated container logs. Defaults to false. +optional. (optional) - pod_log_options_since_seconds = "podLogOptions.sinceSeconds_example" # str | A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. (optional) - pod_log_options_since_time_seconds = "podLogOptions.sinceTime.seconds_example" # str | Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. (optional) - pod_log_options_since_time_nanos = 1 # int | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. (optional) - pod_log_options_timestamps = True # bool | If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. (optional) - pod_log_options_tail_lines = "podLogOptions.tailLines_example" # str | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. 
(optional) - pod_log_options_limit_bytes = "podLogOptions.limitBytes_example" # str | If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. (optional) - pod_log_options_insecure_skip_tls_verify_backend = True # bool | insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. (optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.pipeline_logs(namespace) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling PipelineServiceApi->pipeline_logs: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values - try: - api_response = api_instance.pipeline_logs(namespace, name=name, step_name=step_name, grep=grep, pod_log_options_container=pod_log_options_container, pod_log_options_follow=pod_log_options_follow, pod_log_options_previous=pod_log_options_previous, pod_log_options_since_seconds=pod_log_options_since_seconds, pod_log_options_since_time_seconds=pod_log_options_since_time_seconds, pod_log_options_since_time_nanos=pod_log_options_since_time_nanos, pod_log_options_timestamps=pod_log_options_timestamps, pod_log_options_tail_lines=pod_log_options_tail_lines, pod_log_options_limit_bytes=pod_log_options_limit_bytes, 
pod_log_options_insecure_skip_tls_verify_backend=pod_log_options_insecure_skip_tls_verify_backend) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling PipelineServiceApi->pipeline_logs: %s\n" % e) -``` - - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| optional - only return entries for this pipeline. | [optional] - **step_name** | **str**| optional - only return entries for this step. | [optional] - **grep** | **str**| optional - only return entries which match this expresssion. | [optional] - **pod_log_options_container** | **str**| The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. | [optional] - **pod_log_options_follow** | **bool**| Follow the log stream of the pod. Defaults to false. +optional. | [optional] - **pod_log_options_previous** | **bool**| Return previous terminated container logs. Defaults to false. +optional. | [optional] - **pod_log_options_since_seconds** | **str**| A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. | [optional] - **pod_log_options_since_time_seconds** | **str**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. | [optional] - **pod_log_options_since_time_nanos** | **int**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. 
| [optional] - **pod_log_options_timestamps** | **bool**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. | [optional] - **pod_log_options_tail_lines** | **str**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. | [optional] - **pod_log_options_limit_bytes** | **str**| If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. | [optional] - **pod_log_options_insecure_skip_tls_verify_backend** | **bool**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. | [optional] - -### Return type - -[**StreamResultOfPipelineLogEntry**](StreamResultOfPipelineLogEntry.md) - -### Authorization - -No authorization required - -### HTTP request headers - - - **Content-Type**: Not defined - - **Accept**: application/json - - -### HTTP response details -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**200** | A successful response.(streaming responses) | - | -**0** | An unexpected error response. 
| - | - -[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) - -# **restart_pipeline** -> bool, date, datetime, dict, float, int, list, str, none_type restart_pipeline(namespace, name) - - - -### Example - -```python -import time -import argo_workflows -from argo_workflows.api import pipeline_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from pprint import pprint -# Defining the host is optional and defaults to http://localhost:2746 -# See configuration.py for a list of all supported configuration parameters. -configuration = argo_workflows.Configuration( - host = "http://localhost:2746" -) - - -# Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: - # Create an instance of the API class - api_instance = pipeline_service_api.PipelineServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.restart_pipeline(namespace, name) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling PipelineServiceApi->restart_pipeline: %s\n" % e) -``` - - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - -### Return type - -**bool, date, datetime, dict, float, int, list, str, none_type** - -### Authorization - -No authorization required - -### HTTP request headers - - - **Content-Type**: Not defined - - **Accept**: application/json - - -### HTTP response details -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**200** | A successful response. | - | -**0** | An unexpected error response. 
| - | - -[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) - -# **watch_pipelines** -> StreamResultOfPipelinePipelineWatchEvent watch_pipelines(namespace) - - - -### Example - -```python -import time -import argo_workflows -from argo_workflows.api import pipeline_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.stream_result_of_pipeline_pipeline_watch_event import StreamResultOfPipelinePipelineWatchEvent -from pprint import pprint -# Defining the host is optional and defaults to http://localhost:2746 -# See configuration.py for a list of all supported configuration parameters. -configuration = argo_workflows.Configuration( - host = "http://localhost:2746" -) - - -# Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: - # Create an instance of the API class - api_instance = pipeline_service_api.PipelineServiceApi(api_client) - namespace = "namespace_example" # str | - list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) - list_options_field_selector = "listOptions.fieldSelector_example" # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) - list_options_watch = True # bool | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. (optional) - list_options_allow_watch_bookmarks = True # bool | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. 
Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. (optional) - list_options_resource_version = "listOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_resource_version_match = "listOptions.resourceVersionMatch_example" # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_timeout_seconds = "listOptions.timeoutSeconds_example" # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) - list_options_limit = "listOptions.limit_example" # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) - list_options_continue = "listOptions.continue_example" # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
(optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.watch_pipelines(namespace) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling PipelineServiceApi->watch_pipelines: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values - try: - api_response = api_instance.watch_pipelines(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling PipelineServiceApi->watch_pipelines: %s\n" % e) -``` - - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] - **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] - **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". 
Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] - **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
| [optional] - -### Return type - -[**StreamResultOfPipelinePipelineWatchEvent**](StreamResultOfPipelinePipelineWatchEvent.md) - -### Authorization - -No authorization required - -### HTTP request headers - - - **Content-Type**: Not defined - - **Accept**: application/json - - -### HTTP response details -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**200** | A successful response.(streaming responses) | - | -**0** | An unexpected error response. | - | - -[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) - -# **watch_steps** -> StreamResultOfPipelineStepWatchEvent watch_steps(namespace) - - - -### Example - -```python -import time -import argo_workflows -from argo_workflows.api import pipeline_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.stream_result_of_pipeline_step_watch_event import StreamResultOfPipelineStepWatchEvent -from pprint import pprint -# Defining the host is optional and defaults to http://localhost:2746 -# See configuration.py for a list of all supported configuration parameters. -configuration = argo_workflows.Configuration( - host = "http://localhost:2746" -) - - -# Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: - # Create an instance of the API class - api_instance = pipeline_service_api.PipelineServiceApi(api_client) - namespace = "namespace_example" # str | - list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) - list_options_field_selector = "listOptions.fieldSelector_example" # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. 
(optional) - list_options_watch = True # bool | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. (optional) - list_options_allow_watch_bookmarks = True # bool | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. (optional) - list_options_resource_version = "listOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_resource_version_match = "listOptions.resourceVersionMatch_example" # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_timeout_seconds = "listOptions.timeoutSeconds_example" # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) - list_options_limit = "listOptions.limit_example" # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) - list_options_continue = "listOptions.continue_example" # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. (optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.watch_steps(namespace) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling PipelineServiceApi->watch_steps: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values - try: - api_response = api_instance.watch_steps(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling PipelineServiceApi->watch_steps: %s\n" % e) -``` - - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. 
| [optional] - **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] - **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] - **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] - -### Return type - -[**StreamResultOfPipelineStepWatchEvent**](StreamResultOfPipelineStepWatchEvent.md) - -### Authorization - -No authorization required - -### HTTP request headers - - - **Content-Type**: Not defined - - **Accept**: application/json - - -### HTTP response details -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**200** | A successful response.(streaming responses) | - | -**0** | An unexpected error response. 
| - | - -[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) - diff --git a/sdks/python/client/docs/PipelineStepWatchEvent.md b/sdks/python/client/docs/PipelineStepWatchEvent.md deleted file mode 100644 index 4628a40c6b08..000000000000 --- a/sdks/python/client/docs/PipelineStepWatchEvent.md +++ /dev/null @@ -1,13 +0,0 @@ -# PipelineStepWatchEvent - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**object** | [**GithubComArgoprojLabsArgoDataflowApiV1alpha1Step**](GithubComArgoprojLabsArgoDataflowApiV1alpha1Step.md) | | [optional] -**type** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/SensorServiceApi.md b/sdks/python/client/docs/SensorServiceApi.md index 891b985be406..6ada855bd6fb 100644 --- a/sdks/python/client/docs/SensorServiceApi.md +++ b/sdks/python/client/docs/SensorServiceApi.md @@ -20,6 +20,7 @@ Method | HTTP request | Description ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -34,9 +35,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = sensor_service_api.SensorServiceApi(api_client) namespace = "namespace_example" # str | @@ -1834,7 +1845,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -1857,6 +1868,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -1869,9 +1881,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = sensor_service_api.SensorServiceApi(api_client) namespace = "namespace_example" # str | @@ -1921,7 +1943,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -1944,6 +1966,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -1957,9 +1980,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = sensor_service_api.SensorServiceApi(api_client) namespace = "namespace_example" # str | @@ -1997,7 +2030,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -2020,6 +2053,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -2033,9 +2067,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = sensor_service_api.SensorServiceApi(api_client) namespace = "namespace_example" # str | @@ -2087,7 +2131,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -2110,6 +2154,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -2123,9 +2168,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = sensor_service_api.SensorServiceApi(api_client) namespace = "namespace_example" # str | @@ -2185,7 +2240,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -2208,6 +2263,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -2222,9 +2278,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = sensor_service_api.SensorServiceApi(api_client) namespace = "namespace_example" # str | @@ -4018,7 +4084,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -4041,6 +4107,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -4054,9 +4121,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = sensor_service_api.SensorServiceApi(api_client) namespace = "namespace_example" # str | @@ -4108,7 +4185,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/python/client/docs/StreamResultOfPipelineLogEntry.md b/sdks/python/client/docs/StreamResultOfPipelineLogEntry.md deleted file mode 100644 index 2ba03ecbc0d6..000000000000 --- a/sdks/python/client/docs/StreamResultOfPipelineLogEntry.md +++ /dev/null @@ -1,13 +0,0 @@ -# StreamResultOfPipelineLogEntry - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**error** | [**GrpcGatewayRuntimeStreamError**](GrpcGatewayRuntimeStreamError.md) | | [optional] -**result** | [**PipelineLogEntry**](PipelineLogEntry.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/StreamResultOfPipelinePipelineWatchEvent.md b/sdks/python/client/docs/StreamResultOfPipelinePipelineWatchEvent.md deleted file mode 100644 index 4b3c64a5d483..000000000000 --- a/sdks/python/client/docs/StreamResultOfPipelinePipelineWatchEvent.md +++ /dev/null @@ -1,13 +0,0 @@ -# StreamResultOfPipelinePipelineWatchEvent - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**error** | 
[**GrpcGatewayRuntimeStreamError**](GrpcGatewayRuntimeStreamError.md) | | [optional] -**result** | [**PipelinePipelineWatchEvent**](PipelinePipelineWatchEvent.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/StreamResultOfPipelineStepWatchEvent.md b/sdks/python/client/docs/StreamResultOfPipelineStepWatchEvent.md deleted file mode 100644 index d9f2218fb10d..000000000000 --- a/sdks/python/client/docs/StreamResultOfPipelineStepWatchEvent.md +++ /dev/null @@ -1,13 +0,0 @@ -# StreamResultOfPipelineStepWatchEvent - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**error** | [**GrpcGatewayRuntimeStreamError**](GrpcGatewayRuntimeStreamError.md) | | [optional] -**result** | [**PipelineStepWatchEvent**](PipelineStepWatchEvent.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/WorkflowServiceApi.md b/sdks/python/client/docs/WorkflowServiceApi.md index 847cb110fbef..1adabe95cf51 100644 --- a/sdks/python/client/docs/WorkflowServiceApi.md +++ b/sdks/python/client/docs/WorkflowServiceApi.md @@ -30,6 +30,7 @@ Method | HTTP request | Description ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -44,9 +45,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# 
The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_service_api.WorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -328,6 +339,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -341,6 +364,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -353,6 +388,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -366,6 +402,7 @@ with argo_workflows.ApiClient() as 
api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -405,6 +442,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -505,6 +594,18 @@ with argo_workflows.ApiClient() as api_client: ), ], ), + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( config_map="config_map_example", key="key_example", @@ -542,6 +643,18 @@ with 
argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -555,6 +668,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -567,6 +692,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -580,6 +706,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -619,6 +746,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -1107,6 +1286,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -1117,6 +1307,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -1130,6 +1321,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1168,6 +1360,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, 
+ ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -1812,6 +2056,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -1825,6 +2081,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -1837,6 +2105,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + 
branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -1850,6 +2119,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1889,6 +2159,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -2010,6 +2332,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -2023,6 +2357,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -2035,6 +2381,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -2048,6 +2395,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -2087,6 +2435,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -2230,6 +2630,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -2243,6 +2655,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -2255,6 +2679,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -2268,6 +2693,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -2307,6 +2733,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + 
password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -2399,6 +2877,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -2702,258 +3183,43 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, ), + service_account_name="service_account_name_example", + strategy="strategy_example", ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - repo="repo_example", - revision="revision_example", - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - 
endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( + username_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - security_token="security_token_example", - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - bucket="bucket_example", - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), + blob="blob_example", + container="container_example", endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), use_sdk_creds=True, ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - 
jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -2966,6 +3232,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + 
branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -2979,6 +3246,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3018,6 +3286,375 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + 
lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ], + parameters=[ + IoArgoprojWorkflowV1alpha1Parameter( + default="default_example", + description="description_example", + enum=[ + "enum_example", + ], + global_name="global_name_example", + name="name_example", + value="value_example", + value_from=IoArgoprojWorkflowV1alpha1ValueFrom( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + default="default_example", + event="event_example", + expression="expression_example", + jq_filter="jq_filter_example", + json_path="json_path_example", + parameter="parameter_example", + path="path_example", + supplied={}, + ), + ), + ], + ), + memoize=IoArgoprojWorkflowV1alpha1Memoize( + 
cache=IoArgoprojWorkflowV1alpha1Cache( + config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + key="key_example", + max_age="max_age_example", + ), + metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + metrics=IoArgoprojWorkflowV1alpha1Metrics( + prometheus=[ + IoArgoprojWorkflowV1alpha1Prometheus( + counter=IoArgoprojWorkflowV1alpha1Counter( + value="value_example", + ), + gauge=IoArgoprojWorkflowV1alpha1Gauge( + realtime=True, + value="value_example", + ), + help="help_example", + histogram=IoArgoprojWorkflowV1alpha1Histogram( + buckets=[ + 3.14, + ], + value="value_example", + ), + labels=[ + IoArgoprojWorkflowV1alpha1MetricLabel( + key="key_example", + value="value_example", + ), + ], + name="name_example", + when="when_example", + ), + ], + ), + name="name_example", + node_selector={ + "key": "key_example", + }, + outputs=IoArgoprojWorkflowV1alpha1Outputs( + artifacts=[ + IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + 
container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -3132,6 +3769,243 @@ with argo_workflows.ApiClient() as api_client: "flags_example", ], manifest="manifest_example", + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + 
encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ), merge_strategy="merge_strategy_example", set_owner_reference=True, success_condition="success_condition_example", @@ -3741,6 +4615,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -3754,6 +4640,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -3766,6 +4664,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -3779,6 +4678,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", 
revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3818,6 +4718,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -3935,6 +4887,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -3948,6 +4912,18 @@ with argo_workflows.ApiClient() 
as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -3960,6 +4936,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -3973,6 +4950,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -4012,6 +4990,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + 
token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -4766,6 +5796,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -4776,6 +5817,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -4789,6 +5831,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -4827,6 +5870,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -5471,6 +6566,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -5484,6 +6591,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -5496,6 +6615,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -5509,6 +6629,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -5548,6 +6669,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + 
password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -5669,6 +6842,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -5682,6 +6867,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", 
from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -5694,6 +6891,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -5707,6 +6905,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -5746,6 +6945,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -5889,6 +7140,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + 
pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -5902,6 +7165,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -5914,6 +7189,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -5927,6 +7203,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -5966,6 +7243,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -6058,6 +7387,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -6361,6 +7693,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -6374,6 +7718,335 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", 
+ optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + 
role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ], + parameters=[ + IoArgoprojWorkflowV1alpha1Parameter( + default="default_example", + description="description_example", + enum=[ + "enum_example", + ], + global_name="global_name_example", + name="name_example", + value="value_example", + value_from=IoArgoprojWorkflowV1alpha1ValueFrom( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + default="default_example", + event="event_example", + expression="expression_example", + jq_filter="jq_filter_example", + json_path="json_path_example", + parameter="parameter_example", + path="path_example", + supplied={}, + ), + ), + ], + ), + memoize=IoArgoprojWorkflowV1alpha1Memoize( + cache=IoArgoprojWorkflowV1alpha1Cache( + config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + key="key_example", + max_age="max_age_example", + ), + metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + metrics=IoArgoprojWorkflowV1alpha1Metrics( + prometheus=[ + IoArgoprojWorkflowV1alpha1Prometheus( + counter=IoArgoprojWorkflowV1alpha1Counter( + value="value_example", + ), + gauge=IoArgoprojWorkflowV1alpha1Gauge( + realtime=True, + value="value_example", + ), + help="help_example", + histogram=IoArgoprojWorkflowV1alpha1Histogram( + buckets=[ + 3.14, + ], + value="value_example", + ), + labels=[ + IoArgoprojWorkflowV1alpha1MetricLabel( + key="key_example", + value="value_example", + ), + ], + name="name_example", + when="when_example", + ), + ], + ), + name="name_example", + node_selector={ + "key": "key_example", + }, + outputs=IoArgoprojWorkflowV1alpha1Outputs( + artifacts=[ + IoArgoprojWorkflowV1alpha1Artifact( + 
archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -6386,6 +8059,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -6399,6 +8073,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -6438,6 +8113,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), 
+ client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -6510,6 +8237,7 @@ with argo_workflows.ApiClient() as api_client: sub_path="sub_path_example", ), ], + exit_code="exit_code_example", parameters=[ IoArgoprojWorkflowV1alpha1Parameter( default="default_example", @@ -6537,61 +8265,22 @@ with argo_workflows.ApiClient() as api_client: ), ), ], + result="result_example", ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - 
IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), + parallelism=1, + plugin={}, + pod_spec_patch="pod_spec_patch_example", + priority=1, + priority_class_name="priority_class_name_example", + resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( + action="action_example", + failure_condition="failure_condition_example", + flags=[ + "flags_example", ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( + manifest="manifest_example", + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( _none={}, tar=IoArgoprojWorkflowV1alpha1TarStrategy( @@ -6600,6 +8289,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -6613,6 +8314,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -6625,6 +8338,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, 
disable_submodules=True, fetch=[ @@ -6638,6 +8352,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -6677,6 +8392,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -6748,49 +8515,7 @@ with argo_workflows.ApiClient() as api_client: ), sub_path="sub_path_example", ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - 
value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", + ), merge_strategy="merge_strategy_example", set_owner_reference=True, success_condition="success_condition_example", @@ -7400,6 +9125,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -7413,6 +9150,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -7425,6 +9174,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + 
branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -7438,6 +9188,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -7477,6 +9228,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -7594,6 +9397,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -7607,6 +9422,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -7619,6 +9446,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -7632,6 +9460,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -7671,6 +9500,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -8720,6 +10601,15 @@ with argo_workflows.ApiClient() as api_client: ), ), status=IoArgoprojWorkflowV1alpha1WorkflowStatus( + artifact_gc_status=IoArgoprojWorkflowV1alpha1ArtGCStatus( + not_specified=True, + pods_recouped={ + "key": True, + }, + strategies_processed={ + "key": True, + }, + ), artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus( artifact_repository=IoArgoprojWorkflowV1alpha1ArtifactRepository( archive_logs=True, @@ -8736,6 +10626,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifactRepository( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob_name_format="blob_name_format_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifactRepository( bucket="bucket_example", key_format="key_format_example", @@ -8865,6 +10766,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -8878,6 +10791,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -8890,6 +10815,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -8903,6 +10829,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -8942,6 +10869,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ 
IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -9063,6 +11042,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -9076,6 +11067,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -9088,6 +11091,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -9101,6 +11105,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -9140,6 +11145,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -9274,6 +11331,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -9287,6 +11356,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -9299,6 +11380,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -9312,6 +11394,7 @@ with argo_workflows.ApiClient() 
as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -9351,6 +11434,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -10055,6 +12190,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -10065,6 +12211,7 @@ with argo_workflows.ApiClient() as 
api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -10078,6 +12225,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -10116,6 +12264,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -10760,6 +12960,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + 
service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -10773,6 +12985,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -10785,6 +13009,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -10798,6 +13023,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -10837,6 +13063,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -10958,6 +13236,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -10971,6 +13261,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -10983,6 +13285,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -10996,6 +13299,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -11035,6 +13339,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + 
auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -11178,6 +13534,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -11191,6 +13559,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + 
endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -11203,6 +13583,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -11216,6 +13597,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -11255,6 +13637,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -11347,6 +13781,9 @@ with argo_workflows.ApiClient() as api_client: ], 
http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -11650,6 +14087,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -11663,6 +14112,335 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + 
hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + 
oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ], + parameters=[ + IoArgoprojWorkflowV1alpha1Parameter( + default="default_example", + description="description_example", + enum=[ + "enum_example", + ], + global_name="global_name_example", + name="name_example", + value="value_example", + value_from=IoArgoprojWorkflowV1alpha1ValueFrom( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + default="default_example", + 
event="event_example", + expression="expression_example", + jq_filter="jq_filter_example", + json_path="json_path_example", + parameter="parameter_example", + path="path_example", + supplied={}, + ), + ), + ], + ), + memoize=IoArgoprojWorkflowV1alpha1Memoize( + cache=IoArgoprojWorkflowV1alpha1Cache( + config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + key="key_example", + max_age="max_age_example", + ), + metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + metrics=IoArgoprojWorkflowV1alpha1Metrics( + prometheus=[ + IoArgoprojWorkflowV1alpha1Prometheus( + counter=IoArgoprojWorkflowV1alpha1Counter( + value="value_example", + ), + gauge=IoArgoprojWorkflowV1alpha1Gauge( + realtime=True, + value="value_example", + ), + help="help_example", + histogram=IoArgoprojWorkflowV1alpha1Histogram( + buckets=[ + 3.14, + ], + value="value_example", + ), + labels=[ + IoArgoprojWorkflowV1alpha1MetricLabel( + key="key_example", + value="value_example", + ), + ], + name="name_example", + when="when_example", + ), + ], + ), + name="name_example", + node_selector={ + "key": "key_example", + }, + outputs=IoArgoprojWorkflowV1alpha1Outputs( + artifacts=[ + IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + url="url_example", + 
username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -11675,6 +14453,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -11688,6 +14467,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -11727,6 +14507,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + 
value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -11799,6 +14631,7 @@ with argo_workflows.ApiClient() as api_client: sub_path="sub_path_example", ), ], + exit_code="exit_code_example", parameters=[ IoArgoprojWorkflowV1alpha1Parameter( default="default_example", @@ -11826,61 +14659,22 @@ with argo_workflows.ApiClient() as api_client: ), ), ], + result="result_example", ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), + parallelism=1, + plugin={}, + pod_spec_patch="pod_spec_patch_example", + priority=1, + priority_class_name="priority_class_name_example", + resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( + action="action_example", + failure_condition="failure_condition_example", + flags=[ + "flags_example", ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( + 
manifest="manifest_example", + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( _none={}, tar=IoArgoprojWorkflowV1alpha1TarStrategy( @@ -11889,6 +14683,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -11902,6 +14708,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -11914,6 +14732,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -11927,6 +14746,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -11966,6 +14786,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -12037,49 +14909,7 @@ with argo_workflows.ApiClient() as api_client: ), sub_path="sub_path_example", ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - 
action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", + ), merge_strategy="merge_strategy_example", set_owner_reference=True, success_condition="success_condition_example", @@ -12689,6 +15519,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -12702,6 +15544,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -12714,6 +15568,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -12727,6 +15582,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -12766,6 +15622,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -12883,6 +15791,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -12896,6 +15816,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -12908,6 +15840,7 @@ with 
argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -12921,6 +15854,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -12960,6 +15894,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -13711,6 +16697,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), 
+ service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -13724,6 +16722,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -13736,6 +16746,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -13749,6 +16760,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -13788,6 +16800,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -13888,6 +16952,18 @@ with argo_workflows.ApiClient() as api_client: ), ], ), + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( config_map="config_map_example", key="key_example", @@ -13925,6 +17001,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -13938,6 +17026,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -13950,6 +17050,7 @@ with argo_workflows.ApiClient() as api_client: ), ), 
git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -13963,6 +17064,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -14002,6 +17104,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -14490,6 +17644,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + 
use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -14500,6 +17665,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -14513,6 +17679,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -14551,6 +17718,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -15195,6 +18414,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( 
+ pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -15208,6 +18439,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -15220,6 +18463,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -15233,6 +18477,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -15272,6 +18517,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -15393,6 +18690,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -15406,6 +18715,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -15418,6 +18739,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -15431,6 +18753,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", 
@@ -15470,6 +18793,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -15613,6 +18988,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -15626,6 +19013,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -15638,6 +19037,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -15651,6 +19051,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -15690,6 +19091,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ 
IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -15782,6 +19235,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -16085,6 +19541,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -16098,6 +19566,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -16110,6 +19590,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -16123,6 +19604,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -16162,6 +19644,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + 
password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -16324,6 +19858,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -16337,6 +19883,297 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + 
from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, 
+ ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + 
server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ], + exit_code="exit_code_example", + parameters=[ + IoArgoprojWorkflowV1alpha1Parameter( + default="default_example", + description="description_example", + enum=[ + "enum_example", + ], + global_name="global_name_example", + name="name_example", + value="value_example", + value_from=IoArgoprojWorkflowV1alpha1ValueFrom( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + default="default_example", + event="event_example", + expression="expression_example", + jq_filter="jq_filter_example", + json_path="json_path_example", + parameter="parameter_example", + path="path_example", + supplied={}, + ), + ), + ], + result="result_example", + ), + parallelism=1, + plugin={}, + pod_spec_patch="pod_spec_patch_example", + priority=1, + priority_class_name="priority_class_name_example", + resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( + action="action_example", + failure_condition="failure_condition_example", + flags=[ + "flags_example", + ], + manifest="manifest_example", + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + 
strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -16349,6 +20186,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -16362,6 +20200,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -16401,6 +20240,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -16472,49 +20363,7 @@ with argo_workflows.ApiClient() as api_client: ), sub_path="sub_path_example", ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", + ), merge_strategy="merge_strategy_example", set_owner_reference=True, success_condition="success_condition_example", @@ -17124,6 +20973,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + 
"key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -17137,6 +20998,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -17149,6 +21022,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -17162,6 +21036,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -17201,6 +21076,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -17318,6 +21245,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -17331,6 +21270,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -17343,6 +21294,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -17356,6 +21308,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -17395,6 +21348,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), 
http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -18149,6 +22154,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -18159,6 +22175,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -18172,6 +22189,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + 
single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -18210,6 +22228,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -18854,6 +22924,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -18867,6 +22949,18 @@ with argo_workflows.ApiClient() as api_client: 
optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -18879,6 +22973,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -18892,6 +22987,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -18931,6 +23027,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + 
token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -19052,6 +23200,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -19065,6 +23225,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -19077,6 +23249,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -19090,6 +23263,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -19129,6 +23303,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -19272,6 +23498,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -19285,6 +23523,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -19297,6 +23547,7 @@ with argo_workflows.ApiClient() as api_client: ), ), 
git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -19310,6 +23561,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -19349,6 +23601,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -19441,6 +23745,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -19744,6 +24051,335 @@ with argo_workflows.ApiClient() as 
api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + 
endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ], + parameters=[ + IoArgoprojWorkflowV1alpha1Parameter( + default="default_example", + description="description_example", + enum=[ + "enum_example", + ], + global_name="global_name_example", + name="name_example", + value="value_example", + value_from=IoArgoprojWorkflowV1alpha1ValueFrom( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + default="default_example", + event="event_example", + expression="expression_example", + jq_filter="jq_filter_example", + json_path="json_path_example", + parameter="parameter_example", + path="path_example", + supplied={}, + ), + ), + ], + ), + 
memoize=IoArgoprojWorkflowV1alpha1Memoize( + cache=IoArgoprojWorkflowV1alpha1Cache( + config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + key="key_example", + max_age="max_age_example", + ), + metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + metrics=IoArgoprojWorkflowV1alpha1Metrics( + prometheus=[ + IoArgoprojWorkflowV1alpha1Prometheus( + counter=IoArgoprojWorkflowV1alpha1Counter( + value="value_example", + ), + gauge=IoArgoprojWorkflowV1alpha1Gauge( + realtime=True, + value="value_example", + ), + help="help_example", + histogram=IoArgoprojWorkflowV1alpha1Histogram( + buckets=[ + 3.14, + ], + value="value_example", + ), + labels=[ + IoArgoprojWorkflowV1alpha1MetricLabel( + key="key_example", + value="value_example", + ), + ], + name="name_example", + when="when_example", + ), + ], + ), + name="name_example", + node_selector={ + "key": "key_example", + }, + outputs=IoArgoprojWorkflowV1alpha1Outputs( + artifacts=[ + IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -19757,6 +24393,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + 
endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -19769,6 +24417,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -19782,6 +24431,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -19821,6 +24471,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -19893,6 +24595,7 @@ with argo_workflows.ApiClient() as api_client: 
sub_path="sub_path_example", ), ], + exit_code="exit_code_example", parameters=[ IoArgoprojWorkflowV1alpha1Parameter( default="default_example", @@ -19920,61 +24623,22 @@ with argo_workflows.ApiClient() as api_client: ), ), ], + result="result_example", ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), + parallelism=1, + plugin={}, + pod_spec_patch="pod_spec_patch_example", + priority=1, + priority_class_name="priority_class_name_example", + resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( + action="action_example", + failure_condition="failure_condition_example", + flags=[ + "flags_example", ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( + manifest="manifest_example", + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( _none={}, tar=IoArgoprojWorkflowV1alpha1TarStrategy( @@ -19983,6 +24647,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), 
archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -19996,6 +24672,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -20008,6 +24696,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -20021,6 +24710,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -20060,6 +24750,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -20131,49 +24873,7 @@ with argo_workflows.ApiClient() as api_client: ), sub_path="sub_path_example", ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", + ), merge_strategy="merge_strategy_example", set_owner_reference=True, success_condition="success_condition_example", @@ -20783,6 +25483,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), 
archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -20796,6 +25508,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -20808,6 +25532,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -20821,6 +25546,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -20860,6 +25586,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -20977,6 +25755,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -20990,6 +25780,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -21002,6 +25804,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -21015,6 +25818,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, 
ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -21054,6 +25858,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -22162,7 +27018,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -22185,6 +27041,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -22197,9 +27054,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. 
+# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_service_api.WorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -22212,6 +27079,7 @@ with argo_workflows.ApiClient() as api_client: delete_options_dry_run = [ "deleteOptions.dryRun_example", ] # [str] | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. 
(optional) + force = True # bool | (optional) # example passing only required values which don't have defaults set try: @@ -22223,7 +27091,7 @@ with argo_workflows.ApiClient() as api_client: # example passing only required values which don't have defaults set # and optional values try: - api_response = api_instance.delete_workflow(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run) + api_response = api_instance.delete_workflow(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run, force=force) pprint(api_response) except argo_workflows.ApiException as e: print("Exception when calling WorkflowServiceApi->delete_workflow: %s\n" % e) @@ -22242,6 +27110,7 @@ Name | Type | Description | Notes **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. 
The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] **delete_options_dry_run** | **[str]**| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. | [optional] + **force** | **bool**| | [optional] ### Return type @@ -22249,7 +27118,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -22272,6 +27141,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -22285,9 +27155,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_service_api.WorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -22327,7 +27207,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -22350,6 +27230,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -22364,9 +27245,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_service_api.WorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -22639,6 +27530,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -22652,6 +27555,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -22664,6 +27579,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -22677,6 +27593,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -22716,6 +27633,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + 
basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -22816,6 +27785,18 @@ with argo_workflows.ApiClient() as api_client: ), ], ), + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( config_map="config_map_example", key="key_example", @@ -22853,6 +27834,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + 
strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -22866,6 +27859,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -22878,6 +27883,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -22891,6 +27897,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -22930,6 +27937,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -23418,6 +28477,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -23428,6 +28498,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -23441,6 +28512,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -23479,6 +28551,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -24123,6 +29247,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -24136,6 +29272,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -24148,6 +29296,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -24161,6 +29310,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", 
@@ -24200,6 +29350,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -24321,6 +29523,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -24334,6 +29548,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -24346,6 +29572,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -24359,6 +29586,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -24398,6 +29626,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ 
IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -24541,6 +29821,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -24554,6 +29846,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -24566,6 +29870,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -24579,6 +29884,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -24618,6 +29924,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( 
+ key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -24710,6 +30068,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -25013,6 +30374,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -25026,6 +30399,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", 
gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -25038,6 +30423,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -25051,6 +30437,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -25090,6 +30477,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -25252,6 +30691,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + 
"key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -25265,6 +30716,297 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ], + exit_code="exit_code_example", + parameters=[ + IoArgoprojWorkflowV1alpha1Parameter( + default="default_example", + description="description_example", + enum=[ + "enum_example", + ], + global_name="global_name_example", + name="name_example", + value="value_example", + value_from=IoArgoprojWorkflowV1alpha1ValueFrom( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + default="default_example", + event="event_example", + expression="expression_example", + jq_filter="jq_filter_example", + json_path="json_path_example", + parameter="parameter_example", + path="path_example", + supplied={}, + ), + ), + ], + result="result_example", + ), + parallelism=1, + plugin={}, + pod_spec_patch="pod_spec_patch_example", + priority=1, + priority_class_name="priority_class_name_example", + resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( 
+ action="action_example", + failure_condition="failure_condition_example", + flags=[ + "flags_example", + ], + manifest="manifest_example", + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -25277,6 +31019,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -25290,6 +31033,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -25329,6 +31073,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + 
basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -25400,49 +31196,7 @@ with argo_workflows.ApiClient() as api_client: ), sub_path="sub_path_example", ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - 
pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", + ), merge_strategy="merge_strategy_example", set_owner_reference=True, success_condition="success_condition_example", @@ -26052,6 +31806,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -26065,6 +31831,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -26077,6 +31855,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -26090,6 +31869,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -26129,6 +31909,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + 
auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -26246,6 +32078,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -26259,6 +32103,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + 
endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -26271,6 +32127,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -26284,6 +32141,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -26323,6 +32181,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -27077,6 +32987,17 @@ with argo_workflows.ApiClient() as api_client: 
optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -27087,6 +33008,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -27100,6 +33022,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -27138,6 +33061,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -27782,6 +33757,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -27795,6 +33782,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -27807,6 +33806,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -27820,6 +33820,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -27859,6 +33860,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -27980,6 +34033,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -27993,6 +34058,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -28005,6 +34082,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + 
branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -28018,6 +34096,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -28057,6 +34136,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -28200,6 +34331,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -28213,6 +34356,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -28225,6 +34380,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -28238,6 +34394,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -28277,6 +34434,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -28369,6 +34578,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -28672,6 +34884,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -28685,6 +34909,335 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + 
value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ], + parameters=[ + IoArgoprojWorkflowV1alpha1Parameter( + 
default="default_example", + description="description_example", + enum=[ + "enum_example", + ], + global_name="global_name_example", + name="name_example", + value="value_example", + value_from=IoArgoprojWorkflowV1alpha1ValueFrom( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + default="default_example", + event="event_example", + expression="expression_example", + jq_filter="jq_filter_example", + json_path="json_path_example", + parameter="parameter_example", + path="path_example", + supplied={}, + ), + ), + ], + ), + memoize=IoArgoprojWorkflowV1alpha1Memoize( + cache=IoArgoprojWorkflowV1alpha1Cache( + config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + key="key_example", + max_age="max_age_example", + ), + metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + metrics=IoArgoprojWorkflowV1alpha1Metrics( + prometheus=[ + IoArgoprojWorkflowV1alpha1Prometheus( + counter=IoArgoprojWorkflowV1alpha1Counter( + value="value_example", + ), + gauge=IoArgoprojWorkflowV1alpha1Gauge( + realtime=True, + value="value_example", + ), + help="help_example", + histogram=IoArgoprojWorkflowV1alpha1Histogram( + buckets=[ + 3.14, + ], + value="value_example", + ), + labels=[ + IoArgoprojWorkflowV1alpha1MetricLabel( + key="key_example", + value="value_example", + ), + ], + name="name_example", + when="when_example", + ), + ], + ), + name="name_example", + node_selector={ + "key": "key_example", + }, + outputs=IoArgoprojWorkflowV1alpha1Outputs( + artifacts=[ + IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": 
"key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -28697,6 +35250,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -28710,6 +35264,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -28749,6 +35304,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -28821,6 +35428,7 @@ with argo_workflows.ApiClient() as api_client: sub_path="sub_path_example", ), ], + exit_code="exit_code_example", parameters=[ IoArgoprojWorkflowV1alpha1Parameter( default="default_example", @@ -28848,61 +35456,22 @@ with argo_workflows.ApiClient() as api_client: ), ), ], + result="result_example", ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), + parallelism=1, + plugin={}, + pod_spec_patch="pod_spec_patch_example", + priority=1, + priority_class_name="priority_class_name_example", + 
resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( + action="action_example", + failure_condition="failure_condition_example", + flags=[ + "flags_example", ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( + manifest="manifest_example", + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( _none={}, tar=IoArgoprojWorkflowV1alpha1TarStrategy( @@ -28911,6 +35480,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -28924,6 +35505,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -28936,6 +35529,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -28949,6 +35543,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -28988,6 +35583,58 @@ 
with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -29059,49 +35706,7 @@ with argo_workflows.ApiClient() as api_client: ), sub_path="sub_path_example", ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - 
parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", + ), merge_strategy="merge_strategy_example", set_owner_reference=True, success_condition="success_condition_example", @@ -29711,6 +36316,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -29724,6 +36341,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -29736,6 +36365,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -29749,6 +36379,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -29788,6 +36419,58 @@ 
with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -29905,6 +36588,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -29918,6 +36613,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -29930,6 +36637,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -29943,6 +36651,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -29982,6 +36691,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( 
name="name_example", @@ -31031,6 +37792,15 @@ with argo_workflows.ApiClient() as api_client: ), ), status=IoArgoprojWorkflowV1alpha1WorkflowStatus( + artifact_gc_status=IoArgoprojWorkflowV1alpha1ArtGCStatus( + not_specified=True, + pods_recouped={ + "key": True, + }, + strategies_processed={ + "key": True, + }, + ), artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus( artifact_repository=IoArgoprojWorkflowV1alpha1ArtifactRepository( archive_logs=True, @@ -31047,6 +37817,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifactRepository( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob_name_format="blob_name_format_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifactRepository( bucket="bucket_example", key_format="key_format_example", @@ -31176,6 +37957,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -31189,6 +37982,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -31201,6 +38006,7 @@ 
with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -31214,6 +38020,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -31253,6 +38060,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -31374,6 +38233,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, 
+ ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -31387,6 +38258,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -31399,6 +38282,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -31412,6 +38296,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -31451,6 +38336,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -31585,6 +38522,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -31598,6 +38547,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -31610,6 +38571,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -31623,6 +38585,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -31662,6 +38625,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + 
auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -32366,6 +39381,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -32376,6 +39402,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -32389,6 +39416,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( 
key="key_example", name="name_example", @@ -32427,6 +39455,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -33071,6 +40151,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -33084,6 +40176,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + 
account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -33096,6 +40200,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -33109,6 +40214,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -33148,6 +40254,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + 
), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -33269,6 +40427,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -33282,6 +40452,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -33294,6 +40476,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -33307,6 +40490,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -33346,6 +40530,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + 
client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -33489,6 +40725,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -33502,6 +40750,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -33514,6 +40774,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -33527,6 
+40788,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -33566,6 +40828,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -33658,6 +40972,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -33961,6 +41278,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + 
pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -33974,6 +41303,335 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + 
mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ], + parameters=[ + IoArgoprojWorkflowV1alpha1Parameter( + default="default_example", + description="description_example", + enum=[ + "enum_example", + ], + global_name="global_name_example", + name="name_example", + value="value_example", + value_from=IoArgoprojWorkflowV1alpha1ValueFrom( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + default="default_example", + event="event_example", + expression="expression_example", + jq_filter="jq_filter_example", + json_path="json_path_example", + parameter="parameter_example", + path="path_example", + supplied={}, + ), + ), + ], + ), + memoize=IoArgoprojWorkflowV1alpha1Memoize( + cache=IoArgoprojWorkflowV1alpha1Cache( + config_map=ConfigMapKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + ), + key="key_example", + max_age="max_age_example", + ), + metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + metrics=IoArgoprojWorkflowV1alpha1Metrics( + prometheus=[ + IoArgoprojWorkflowV1alpha1Prometheus( + counter=IoArgoprojWorkflowV1alpha1Counter( + value="value_example", + ), + gauge=IoArgoprojWorkflowV1alpha1Gauge( + realtime=True, + value="value_example", + ), + help="help_example", + histogram=IoArgoprojWorkflowV1alpha1Histogram( + buckets=[ + 3.14, + ], + value="value_example", + ), + labels=[ + IoArgoprojWorkflowV1alpha1MetricLabel( + key="key_example", + value="value_example", + ), + ], + name="name_example", + when="when_example", + ), + ], + ), + name="name_example", + node_selector={ + "key": "key_example", + }, + outputs=IoArgoprojWorkflowV1alpha1Outputs( + artifacts=[ + IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + 
deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -33986,6 +41644,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -33999,6 +41658,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -34038,6 +41698,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -34110,6 +41822,7 @@ with argo_workflows.ApiClient() as api_client: sub_path="sub_path_example", ), ], + exit_code="exit_code_example", 
parameters=[ IoArgoprojWorkflowV1alpha1Parameter( default="default_example", @@ -34137,61 +41850,22 @@ with argo_workflows.ApiClient() as api_client: ), ), ], + result="result_example", ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), + parallelism=1, + plugin={}, + pod_spec_patch="pod_spec_patch_example", + priority=1, + priority_class_name="priority_class_name_example", + resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( + action="action_example", + failure_condition="failure_condition_example", + flags=[ + "flags_example", ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( + manifest="manifest_example", + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( _none={}, tar=IoArgoprojWorkflowV1alpha1TarStrategy( @@ -34200,6 +41874,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + 
pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -34213,6 +41899,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -34225,6 +41923,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -34238,6 +41937,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -34277,6 +41977,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -34348,49 +42100,7 @@ with argo_workflows.ApiClient() as api_client: ), sub_path="sub_path_example", ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", + ), merge_strategy="merge_strategy_example", set_owner_reference=True, success_condition="success_condition_example", @@ -35000,6 +42710,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + 
artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -35013,6 +42735,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -35025,6 +42759,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -35038,6 +42773,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -35077,6 +42813,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -35194,6 +42982,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -35207,6 +43007,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -35219,6 +43031,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -35232,6 +43045,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( 
key="key_example", name="name_example", @@ -35271,6 +43085,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -36022,6 +43888,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -36035,6 +43913,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + 
account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -36047,6 +43937,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -36060,6 +43951,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -36099,6 +43991,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + 
), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -36199,6 +44143,18 @@ with argo_workflows.ApiClient() as api_client: ), ], ), + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( config_map="config_map_example", key="key_example", @@ -36236,6 +44192,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -36249,6 +44217,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -36261,6 +44241,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -36274,6 +44255,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -36313,6 +44295,58 @@ 
with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -36801,6 +44835,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -36811,6 +44856,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -36824,6 +44870,7 @@ with argo_workflows.ApiClient() as api_client: 
), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -36862,6 +44909,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -37506,6 +45605,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -37519,6 +45630,18 @@ 
with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -37531,6 +45654,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -37544,6 +45668,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -37583,6 +45708,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + 
"scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -37704,6 +45881,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -37717,6 +45906,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -37729,6 +45930,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -37742,6 +45944,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -37781,6 +45984,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", 
+ name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -37924,6 +46179,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -37937,6 +46204,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -37949,6 +46228,7 @@ with argo_workflows.ApiClient() as api_client: ), ), 
git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -37962,6 +46242,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -38001,6 +46282,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -38093,6 +46426,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -38396,6 +46732,18 @@ with argo_workflows.ApiClient() as 
api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -38409,6 +46757,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -38421,6 +46781,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -38434,6 +46795,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -38473,6 +46835,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -38635,6 +47049,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -38648,6 +47074,297 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + 
insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + 
IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + 
], + exit_code="exit_code_example", + parameters=[ + IoArgoprojWorkflowV1alpha1Parameter( + default="default_example", + description="description_example", + enum=[ + "enum_example", + ], + global_name="global_name_example", + name="name_example", + value="value_example", + value_from=IoArgoprojWorkflowV1alpha1ValueFrom( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + default="default_example", + event="event_example", + expression="expression_example", + jq_filter="jq_filter_example", + json_path="json_path_example", + parameter="parameter_example", + path="path_example", + supplied={}, + ), + ), + ], + result="result_example", + ), + parallelism=1, + plugin={}, + pod_spec_patch="pod_spec_patch_example", + priority=1, + priority_class_name="priority_class_name_example", + resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( + action="action_example", + failure_condition="failure_condition_example", + flags=[ + "flags_example", + ], + manifest="manifest_example", + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -38660,6 +47377,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -38673,6 +47391,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -38712,6 +47431,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ 
IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -38783,49 +47554,7 @@ with argo_workflows.ApiClient() as api_client: ), sub_path="sub_path_example", ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", + ), merge_strategy="merge_strategy_example", set_owner_reference=True, success_condition="success_condition_example", @@ -39435,6 +48164,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -39448,6 +48189,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + 
account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -39460,6 +48213,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -39473,6 +48227,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -39512,6 +48267,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + 
), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -39629,6 +48436,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -39642,6 +48461,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -39654,6 +48485,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -39667,6 +48499,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -39706,6 +48539,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + 
client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -40460,6 +49345,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -40470,6 +49366,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -40483,6 +49380,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -40521,6 +49419,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -41165,6 +50115,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -41178,6 +50140,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -41190,6 
+50164,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -41203,6 +50178,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -41242,6 +50218,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -41363,6 +50391,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": 
"key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -41376,6 +50416,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -41388,6 +50440,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -41401,6 +50454,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -41440,6 +50494,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + 
), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -41583,6 +50689,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -41596,6 +50714,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -41608,6 +50738,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -41621,6 +50752,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -41660,6 +50792,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + 
auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -41752,6 +50936,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -42055,6 +51242,335 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), 
+ url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + 
auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + 
s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ], + parameters=[ + IoArgoprojWorkflowV1alpha1Parameter( + default="default_example", + description="description_example", + enum=[ + "enum_example", + ], + global_name="global_name_example", + name="name_example", + value="value_example", + value_from=IoArgoprojWorkflowV1alpha1ValueFrom( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + default="default_example", + event="event_example", + expression="expression_example", + jq_filter="jq_filter_example", + json_path="json_path_example", + parameter="parameter_example", + path="path_example", + supplied={}, + ), + ), + ], + ), + memoize=IoArgoprojWorkflowV1alpha1Memoize( + cache=IoArgoprojWorkflowV1alpha1Cache( + config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + key="key_example", + max_age="max_age_example", + ), + metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + metrics=IoArgoprojWorkflowV1alpha1Metrics( + prometheus=[ + 
IoArgoprojWorkflowV1alpha1Prometheus( + counter=IoArgoprojWorkflowV1alpha1Counter( + value="value_example", + ), + gauge=IoArgoprojWorkflowV1alpha1Gauge( + realtime=True, + value="value_example", + ), + help="help_example", + histogram=IoArgoprojWorkflowV1alpha1Histogram( + buckets=[ + 3.14, + ], + value="value_example", + ), + labels=[ + IoArgoprojWorkflowV1alpha1MetricLabel( + key="key_example", + value="value_example", + ), + ], + name="name_example", + when="when_example", + ), + ], + ), + name="name_example", + node_selector={ + "key": "key_example", + }, + outputs=IoArgoprojWorkflowV1alpha1Outputs( + artifacts=[ + IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -42068,6 +51584,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -42080,6 +51608,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -42093,6 +51622,7 @@ with argo_workflows.ApiClient() as api_client: ), 
repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -42132,6 +51662,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -42204,6 +51786,7 @@ with argo_workflows.ApiClient() as api_client: sub_path="sub_path_example", ), ], + exit_code="exit_code_example", parameters=[ IoArgoprojWorkflowV1alpha1Parameter( default="default_example", @@ -42231,61 +51814,22 @@ with argo_workflows.ApiClient() as api_client: ), ), ], + result="result_example", ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), + parallelism=1, + plugin={}, + pod_spec_patch="pod_spec_patch_example", + priority=1, + priority_class_name="priority_class_name_example", + resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( + action="action_example", + failure_condition="failure_condition_example", + flags=[ + "flags_example", ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( + manifest="manifest_example", + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( _none={}, tar=IoArgoprojWorkflowV1alpha1TarStrategy( @@ -42294,6 +51838,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -42307,6 +51863,18 @@ with 
argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -42319,6 +51887,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -42332,6 +51901,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -42371,6 +51941,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + 
"scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -42442,49 +52064,7 @@ with argo_workflows.ApiClient() as api_client: ), sub_path="sub_path_example", ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", + ), merge_strategy="merge_strategy_example", set_owner_reference=True, success_condition="success_condition_example", @@ -43094,6 +52674,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -43107,6 +52699,18 @@ with 
argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -43119,6 +52723,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -43132,6 +52737,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -43171,6 +52777,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + 
"scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -43288,6 +52946,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -43301,6 +52971,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -43313,6 +52995,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -43326,6 +53009,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -43365,6 +53049,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", 
+ name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -44473,7 +54209,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -44496,6 +54232,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -44509,9 +54246,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_service_api.WorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -44565,7 +54312,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -44588,6 +54335,7 @@ DEPRECATED: Cannot work via HTTP if podName is an empty string. Use WorkflowLogs ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -44601,9 +54349,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_service_api.WorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -44667,7 +54425,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -44690,6 +54448,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -44704,9 +54463,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_service_api.WorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -44715,6 +54484,9 @@ with argo_workflows.ApiClient() as api_client: memoized=True, name="name_example", namespace="namespace_example", + parameters=[ + "parameters_example", + ], ) # IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest | # example passing only required values which don't have defaults set @@ -44740,7 +54512,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -44763,6 +54535,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -44777,9 +54550,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_service_api.WorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -44813,7 +54596,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -44836,6 +54619,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -44850,9 +54634,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_service_api.WorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -44861,6 +54655,9 @@ with argo_workflows.ApiClient() as api_client: name="name_example", namespace="namespace_example", node_field_selector="node_field_selector_example", + parameters=[ + "parameters_example", + ], restart_successful=True, ) # IoArgoprojWorkflowV1alpha1WorkflowRetryRequest | @@ -44887,7 +54684,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -44910,6 +54707,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -44924,9 +54722,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_service_api.WorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -44963,7 +54771,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -44986,6 +54794,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -45000,9 +54809,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_service_api.WorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -45037,7 +54856,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -45060,6 +54879,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -45074,9 +54894,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_service_api.WorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -45131,7 +54961,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -45154,6 +54984,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -45168,9 +54999,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_service_api.WorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -45203,7 +55044,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -45226,6 +55067,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -45240,9 +55082,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_service_api.WorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -45275,7 +55127,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -45298,6 +55150,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -45311,9 +55164,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_service_api.WorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -45365,7 +55228,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -45388,6 +55251,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -45401,9 +55265,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_service_api.WorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -45457,7 +55331,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -45480,6 +55354,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -45493,9 +55368,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_service_api.WorkflowServiceApi(api_client) namespace = "namespace_example" # str | @@ -45557,7 +55442,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/python/client/docs/WorkflowTemplateServiceApi.md b/sdks/python/client/docs/WorkflowTemplateServiceApi.md index e51f70201d0d..1cad5096f935 100644 --- a/sdks/python/client/docs/WorkflowTemplateServiceApi.md +++ b/sdks/python/client/docs/WorkflowTemplateServiceApi.md @@ -19,6 +19,7 @@ Method | HTTP request | Description ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -33,9 +34,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_template_service_api.WorkflowTemplateServiceApi(api_client) namespace = "namespace_example" # str | @@ -315,6 +326,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -328,6 +351,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -340,6 +375,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -353,6 +389,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -392,6 +429,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + 
basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -492,6 +581,18 @@ with argo_workflows.ApiClient() as api_client: ), ], ), + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( config_map="config_map_example", key="key_example", @@ -529,6 +630,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -542,6 +655,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -554,6 +679,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -567,6 +693,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -606,6 +733,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + 
IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -1094,6 +1273,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -1104,6 +1294,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -1117,6 +1308,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1155,6 +1347,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", 
+ name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -1799,6 +2043,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -1812,6 +2068,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -1824,6 +2092,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -1837,6 +2106,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1876,6 +2146,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), 
http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -1997,6 +2319,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -2010,6 +2344,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + 
container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -2022,6 +2368,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -2035,6 +2382,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -2074,6 +2422,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -2217,8 +2617,20 @@ with argo_workflows.ApiClient() as 
api_client: zip={}, ), archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, @@ -2230,6 +2642,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -2242,6 +2666,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -2255,6 +2680,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -2294,6 +2720,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + 
client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -2386,6 +2864,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -2689,6 +3170,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -2702,6 +3195,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", 
from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -2714,6 +3219,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -2727,6 +3233,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -2766,6 +3273,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -2928,6 +3487,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + 
pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -2941,6 +3512,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -2953,6 +3536,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -2966,6 +3550,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3005,6 +3590,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -3119,78 +3756,315 @@ with argo_workflows.ApiClient() as api_client: "flags_example", ], manifest="manifest_example", - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, 
+ ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( + url="url_example", + username_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + 
ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ), + merge_strategy="merge_strategy_example", + set_owner_reference=True, + success_condition="success_condition_example", + ), + retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( + 
affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( + node_anti_affinity={}, + ), + backoff=IoArgoprojWorkflowV1alpha1Backoff( + duration="duration_example", + factor="factor_example", + max_duration="max_duration_example", + ), + expression="expression_example", + limit="limit_example", + retry_policy="retry_policy_example", + ), + scheduler_name="scheduler_name_example", + script=IoArgoprojWorkflowV1alpha1ScriptTemplate( + args=[ + "args_example", + ], + command=[ + "command_example", + ], + env=[ + EnvVar( + name="name_example", + value="value_example", + value_from=EnvVarSource( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + field_ref=ObjectFieldSelector( + api_version="api_version_example", + field_path="field_path_example", + ), + resource_field_ref=ResourceFieldSelector( + container_name="container_name_example", + divisor="divisor_example", + resource="resource_example", + ), + secret_key_ref=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + ], + env_from=[ + EnvFromSource( + config_map_ref=ConfigMapEnvSource( + name="name_example", + optional=True, + ), + prefix="prefix_example", + secret_ref=SecretEnvSource( + name="name_example", + optional=True, + ), + ), + ], + image="image_example", + image_pull_policy="image_pull_policy_example", + lifecycle=Lifecycle( + post_start=LifecycleHandler( + _exec=ExecAction( + command=[ + "command_example", ], ), http_get=HTTPGetAction( @@ -3728,6 +4602,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( 
key="key_example", @@ -3741,6 +4627,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -3753,6 +4651,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -3766,6 +4665,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3805,10 +4705,62 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", ), ], url="url_example", @@ -3922,6 +4874,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -3935,6 +4899,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -3947,6 +4923,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -3960,6 +4937,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3999,6 +4977,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + 
basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -4753,6 +5783,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -4763,6 +5804,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -4776,6 +5818,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ 
-4814,6 +5857,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -5458,6 +6553,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -5471,6 +6578,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -5483,6 +6602,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -5496,6 +6616,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -5535,6 +6656,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ 
IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -5656,6 +6829,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -5669,6 +6854,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -5681,6 +6878,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -5694,6 +6892,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -5733,6 +6932,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -5876,6 +7127,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -5889,6 +7152,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -5901,6 +7176,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -5914,6 +7190,7 @@ with argo_workflows.ApiClient() as 
api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -5953,6 +7230,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -6045,6 +7374,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -6348,6 +7680,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", 
+ }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -6361,6 +7705,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -6373,6 +7729,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -6386,6 +7743,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -6425,11 +7783,63 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), ], url="url_example", ), @@ -6587,6 +7997,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -6600,6 +8022,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -6612,6 +8046,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -6625,6 +8060,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, 
ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -6664,6 +8100,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -6778,97 +8266,334 @@ with argo_workflows.ApiClient() as api_client: "flags_example", ], manifest="manifest_example", - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - 
retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( + url="url_example", + username_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, + deleted=True, + _from="_from_example", + 
from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", ), - ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + 
auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + 
s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ), + merge_strategy="merge_strategy_example", + set_owner_reference=True, + success_condition="success_condition_example", + ), + retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( + affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( + node_anti_affinity={}, + ), + backoff=IoArgoprojWorkflowV1alpha1Backoff( + duration="duration_example", + factor="factor_example", + max_duration="max_duration_example", + ), + expression="expression_example", + limit="limit_example", + retry_policy="retry_policy_example", + ), + scheduler_name="scheduler_name_example", + script=IoArgoprojWorkflowV1alpha1ScriptTemplate( + args=[ + "args_example", + ], + command=[ + "command_example", + ], + env=[ + EnvVar( + name="name_example", + value="value_example", + value_from=EnvVarSource( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + field_ref=ObjectFieldSelector( + api_version="api_version_example", + field_path="field_path_example", + ), + resource_field_ref=ResourceFieldSelector( + container_name="container_name_example", + 
divisor="divisor_example", + resource="resource_example", + ), + secret_key_ref=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + ], + env_from=[ + EnvFromSource( + config_map_ref=ConfigMapEnvSource( + name="name_example", + optional=True, + ), + prefix="prefix_example", + secret_ref=SecretEnvSource( + name="name_example", + optional=True, + ), + ), + ], + image="image_example", + image_pull_policy="image_pull_policy_example", + lifecycle=Lifecycle( + post_start=LifecycleHandler( + _exec=ExecAction( + command=[ + "command_example", + ], + ), + http_get=HTTPGetAction( + host="host_example", + http_headers=[ + HTTPHeader( + name="name_example", + value="value_example", + ), + ], + path="path_example", + port="port_example", + scheme="HTTP", + ), + tcp_socket=TCPSocketAction( + host="host_example", + port="port_example", + ), + ), pre_stop=LifecycleHandler( _exec=ExecAction( command=[ @@ -7387,6 +9112,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -7400,6 +9137,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -7412,6 +9161,7 @@ with argo_workflows.ApiClient() as api_client: ), ), 
git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -7425,6 +9175,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -7464,6 +9215,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -7581,6 +9384,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", 
+ strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -7594,9 +9409,21 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", service_account_key_secret=SecretKeySelector( @@ -7606,6 +9433,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -7619,6 +9447,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -7658,6 +9487,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + 
oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -8731,7 +10612,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -8754,6 +10635,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -8766,9 +10648,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_template_service_api.WorkflowTemplateServiceApi(api_client) namespace = "namespace_example" # str | @@ -8818,7 +10710,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -8841,6 +10733,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -8854,9 +10747,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_template_service_api.WorkflowTemplateServiceApi(api_client) namespace = "namespace_example" # str | @@ -8894,7 +10797,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -8917,6 +10820,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -8931,9 +10835,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_template_service_api.WorkflowTemplateServiceApi(api_client) namespace = "namespace_example" # str | @@ -9213,6 +11127,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -9226,6 +11152,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -9238,6 +11176,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -9251,6 +11190,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -9290,6 +11230,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + 
basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -9390,6 +11382,18 @@ with argo_workflows.ApiClient() as api_client: ), ], ), + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( config_map="config_map_example", key="key_example", @@ -9427,6 +11431,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", 
+ ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -9440,6 +11456,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -9452,6 +11480,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -9465,6 +11494,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -9504,6 +11534,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -9992,6 +12074,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -10002,6 +12095,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -10015,6 +12109,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -10053,6 +12148,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + 
client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -10697,6 +12844,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -10710,6 +12869,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -10722,6 +12893,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -10735,6 +12907,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -10774,6 +12947,58 @@ with 
argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -10895,6 +13120,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -10908,6 +13145,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -10920,6 +13169,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -10933,6 +13183,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -10972,15 +13223,67 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ], - url="url_example", - ), - mode=1, + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + 
token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, name="name_example", optional=True, oss=IoArgoprojWorkflowV1alpha1OSSArtifact( @@ -11115,6 +13418,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -11128,6 +13443,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -11140,6 +13467,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -11153,6 +13481,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -11192,6 +13521,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + 
password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -11284,6 +13665,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -11587,6 +13971,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -11600,6 +13996,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + 
account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -11612,6 +14020,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -11625,6 +14034,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -11664,6 +14074,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + 
), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -11826,6 +14288,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -11839,6 +14313,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -11851,6 +14337,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -11864,6 +14351,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -11903,6 +14391,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + 
client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -12017,57 +14557,294 @@ with argo_workflows.ApiClient() as api_client: "flags_example", ], manifest="manifest_example", - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + 
zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, ), - secret_key_ref=SecretKeySelector( + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, ), - ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), 
+ mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ), + merge_strategy="merge_strategy_example", + set_owner_reference=True, + success_condition="success_condition_example", + ), + retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( + affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( + node_anti_affinity={}, + ), + backoff=IoArgoprojWorkflowV1alpha1Backoff( + duration="duration_example", + factor="factor_example", + 
max_duration="max_duration_example", + ), + expression="expression_example", + limit="limit_example", + retry_policy="retry_policy_example", + ), + scheduler_name="scheduler_name_example", + script=IoArgoprojWorkflowV1alpha1ScriptTemplate( + args=[ + "args_example", + ], + command=[ + "command_example", + ], + env=[ + EnvVar( + name="name_example", + value="value_example", + value_from=EnvVarSource( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + field_ref=ObjectFieldSelector( + api_version="api_version_example", + field_path="field_path_example", + ), + resource_field_ref=ResourceFieldSelector( + container_name="container_name_example", + divisor="divisor_example", + resource="resource_example", + ), + secret_key_ref=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), ], env_from=[ EnvFromSource( @@ -12626,6 +15403,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -12639,6 +15428,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -12651,6 +15452,7 @@ with argo_workflows.ApiClient() as api_client: ), ), 
git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -12664,6 +15466,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -12703,21 +15506,73 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + 
], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", optional=True, ), bucket="bucket_example", @@ -12820,6 +15675,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -12833,6 +15700,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -12845,6 +15724,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -12858,6 +15738,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -12897,6 +15778,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -13651,6 +16584,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -13661,6 +16605,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -13674,6 +16619,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -13712,6 +16658,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + 
auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -14356,6 +17354,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -14369,6 +17379,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + 
endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -14381,6 +17403,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -14394,6 +17417,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -14433,6 +17457,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -14554,6 +17630,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), 
archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -14567,6 +17655,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -14579,6 +17679,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -14592,6 +17693,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -14631,6 +17733,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -14774,6 +17928,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -14787,6 +17953,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -14799,6 +17977,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -14812,6 +17991,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, 
ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -14851,6 +18031,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -14943,6 +18175,9 @@ with argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -15246,6 +18481,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + 
service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -15259,8 +18506,20 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), - _from="_from_example", - from_expression="from_expression_example", + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -15271,6 +18530,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -15284,6 +18544,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -15323,6 +18584,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + 
client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -15485,6 +18798,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -15498,6 +18823,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -15510,6 +18847,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -15523,6 +18861,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -15562,6 +18901,58 @@ with 
argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -15676,93 +19067,330 @@ with argo_workflows.ApiClient() as api_client: "flags_example", ], manifest="manifest_example", - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - 
script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( + url="url_example", + username_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + 
gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", path="path_example", - port="port_example", - scheme="HTTP", ), - tcp_socket=TCPSocketAction( + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + 
password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ), + merge_strategy="merge_strategy_example", + set_owner_reference=True, + success_condition="success_condition_example", + ), + retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( + affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( + node_anti_affinity={}, + ), + backoff=IoArgoprojWorkflowV1alpha1Backoff( + duration="duration_example", + factor="factor_example", + max_duration="max_duration_example", + ), + expression="expression_example", + limit="limit_example", + retry_policy="retry_policy_example", + ), + scheduler_name="scheduler_name_example", + script=IoArgoprojWorkflowV1alpha1ScriptTemplate( + args=[ + "args_example", + ], + command=[ + "command_example", + ], + env=[ + EnvVar( + name="name_example", + value="value_example", + value_from=EnvVarSource( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + field_ref=ObjectFieldSelector( + api_version="api_version_example", + field_path="field_path_example", + ), + resource_field_ref=ResourceFieldSelector( + container_name="container_name_example", + divisor="divisor_example", + resource="resource_example", + ), + secret_key_ref=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + ), + ), + ], + env_from=[ + EnvFromSource( + config_map_ref=ConfigMapEnvSource( + name="name_example", + optional=True, + ), + prefix="prefix_example", + secret_ref=SecretEnvSource( + name="name_example", + optional=True, + ), + ), + ], + image="image_example", + image_pull_policy="image_pull_policy_example", + lifecycle=Lifecycle( + post_start=LifecycleHandler( + _exec=ExecAction( + command=[ + "command_example", + ], + ), + http_get=HTTPGetAction( + host="host_example", + http_headers=[ + HTTPHeader( + name="name_example", + value="value_example", + ), + ], + path="path_example", + port="port_example", + scheme="HTTP", + ), + tcp_socket=TCPSocketAction( host="host_example", port="port_example", ), @@ -16285,6 +19913,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -16298,6 +19938,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -16310,6 +19962,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -16323,6 +19976,7 @@ with argo_workflows.ApiClient() as api_client: ), 
repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -16362,6 +20016,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -16479,6 +20185,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -16492,6 +20210,18 @@ with 
argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -16504,6 +20234,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -16517,6 +20248,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -16556,6 +20288,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + 
"scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -17629,7 +21413,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -17652,6 +21436,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -17665,9 +21450,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_template_service_api.WorkflowTemplateServiceApi(api_client) namespace = "namespace_example" # str | @@ -17719,7 +21514,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers @@ -17742,6 +21537,7 @@ No authorization required ### Example +* Api Key Authentication (BearerToken): ```python import time import argo_workflows @@ -17756,9 +21552,19 @@ configuration = argo_workflows.Configuration( host = "http://localhost:2746" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. 
+# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: BearerToken +configuration.api_key['BearerToken'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['BearerToken'] = 'Bearer' # Enter a context with an instance of the API client -with argo_workflows.ApiClient() as api_client: +with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = workflow_template_service_api.WorkflowTemplateServiceApi(api_client) namespace = "namespace_example" # str | @@ -18033,6 +21839,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -18046,6 +21864,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -18058,6 +21888,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -18071,6 +21902,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, 
ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -18110,6 +21942,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -18210,6 +22094,18 @@ with argo_workflows.ApiClient() as api_client: ), ], ), + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( config_map="config_map_example", key="key_example", @@ -18247,6 +22143,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + 
artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -18260,6 +22168,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -18272,6 +22192,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -18285,6 +22206,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -18324,6 +22246,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -18812,6 +22786,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -18822,6 +22807,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -18835,6 +22821,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -18873,6 +22860,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + 
client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -19517,6 +23556,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -19530,6 +23581,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -19542,6 +23605,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -19555,6 
+23619,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -19594,6 +23659,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -19715,6 +23832,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( 
password_secret=SecretKeySelector( key="key_example", @@ -19728,6 +23857,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -19740,6 +23881,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -19753,6 +23895,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -19792,27 +23935,79 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + 
optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( mark_deletion_after_days=1, mark_infrequent_access_after_days=1, @@ -19935,6 +24130,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -19948,6 +24155,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -19960,6 +24179,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -19973,6 +24193,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -20012,6 +24233,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -20104,6 +24377,9 @@ with 
argo_workflows.ApiClient() as api_client: ], http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -20407,6 +24683,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -20420,6 +24708,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -20432,6 +24732,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -20445,6 +24746,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -20484,6 +24786,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -20646,6 +25000,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -20659,6 +25025,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -20671,6 +25049,7 @@ with 
argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -20684,6 +25063,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -20723,6 +25103,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -20837,47 +25269,284 @@ with argo_workflows.ApiClient() as api_client: "flags_example", ], manifest="manifest_example", - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - 
retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + 
), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + 
kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ), + merge_strategy="merge_strategy_example", + set_owner_reference=True, + success_condition="success_condition_example", + ), + retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( + affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( + node_anti_affinity={}, + ), + backoff=IoArgoprojWorkflowV1alpha1Backoff( + duration="duration_example", + factor="factor_example", + max_duration="max_duration_example", + ), + expression="expression_example", + limit="limit_example", + retry_policy="retry_policy_example", + ), + scheduler_name="scheduler_name_example", + script=IoArgoprojWorkflowV1alpha1ScriptTemplate( + args=[ + "args_example", + ], + command=[ + "command_example", + ], + env=[ + EnvVar( + name="name_example", + value="value_example", + value_from=EnvVarSource( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + field_ref=ObjectFieldSelector( + api_version="api_version_example", + field_path="field_path_example", + ), + resource_field_ref=ResourceFieldSelector( + container_name="container_name_example", divisor="divisor_example", resource="resource_example", ), @@ -21446,6 +26115,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -21459,6 +26140,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -21471,6 +26164,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -21484,6 +26178,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -21523,17 +26218,69 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + 
key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, oss=IoArgoprojWorkflowV1alpha1OSSArtifact( access_key_secret=SecretKeySelector( key="key_example", @@ -21640,6 +26387,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -21653,6 +26412,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -21665,6 +26436,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -21678,6 +26450,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", 
revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -21717,6 +26490,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -22471,6 +27296,17 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( bucket="bucket_example", key="key_example", @@ -22481,6 +27317,7 @@ with argo_workflows.ApiClient() as api_client: ), ), 
git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -22494,6 +27331,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -22532,6 +27370,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -23176,6 +28066,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + 
service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -23189,6 +28091,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -23201,6 +28115,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -23214,6 +28129,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -23253,6 +28169,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -23374,6 +28342,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -23387,6 +28367,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -23399,6 +28391,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -23412,6 +28405,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -23451,6 +28445,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + 
auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -23594,6 +28640,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -23607,6 +28665,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + 
endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -23619,6 +28689,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -23632,6 +28703,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -23671,6 +28743,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -23763,6 +28887,9 @@ with argo_workflows.ApiClient() as api_client: ], 
http=IoArgoprojWorkflowV1alpha1HTTP( body="body_example", + body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( + bytes='YQ==', + ), headers=[ IoArgoprojWorkflowV1alpha1HTTPHeader( name="name_example", @@ -24066,6 +29193,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -24079,18 +29218,348 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", + depth=1, + disable_submodules=True, + fetch=[ + "fetch_example", + ], + insecure_ignore_host_key=True, + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + repo="repo_example", + revision="revision_example", + single_branch=True, + ssh_private_key_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + global_name="global_name_example", + hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( + addresses=[ + "addresses_example", + ], + force=True, + hdfs_user="hdfs_user_example", + krb_c_cache_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_config_config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_keytab_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + headers=[ + 
IoArgoprojWorkflowV1alpha1Header( + name="name_example", + value="value_example", + ), + ], + url="url_example", + ), + mode=1, + name="name_example", + optional=True, + oss=IoArgoprojWorkflowV1alpha1OSSArtifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=True, + endpoint="endpoint_example", + key="key_example", + lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( + mark_deletion_after_days=1, + mark_infrequent_access_after_days=1, + ), + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + security_token="security_token_example", + ), + path="path_example", + raw=IoArgoprojWorkflowV1alpha1RawArtifact( + data="data_example", + ), + recurse_mode=True, + s3=IoArgoprojWorkflowV1alpha1S3Artifact( + access_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + bucket="bucket_example", + create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( + object_locking=True, + ), + encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( + enable_encryption=True, + kms_encryption_context="kms_encryption_context_example", + kms_key_id="kms_key_id_example", + server_side_customer_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + endpoint="endpoint_example", + insecure=True, + key="key_example", + region="region_example", + role_arn="role_arn_example", + secret_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + use_sdk_creds=True, + ), + sub_path="sub_path_example", + ), + ], + parameters=[ + IoArgoprojWorkflowV1alpha1Parameter( + default="default_example", + description="description_example", + enum=[ + "enum_example", + ], + global_name="global_name_example", + name="name_example", + value="value_example", + 
value_from=IoArgoprojWorkflowV1alpha1ValueFrom( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + default="default_example", + event="event_example", + expression="expression_example", + jq_filter="jq_filter_example", + json_path="json_path_example", + parameter="parameter_example", + path="path_example", + supplied={}, + ), + ), + ], + ), + memoize=IoArgoprojWorkflowV1alpha1Memoize( + cache=IoArgoprojWorkflowV1alpha1Cache( + config_map=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + key="key_example", + max_age="max_age_example", + ), + metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + metrics=IoArgoprojWorkflowV1alpha1Metrics( + prometheus=[ + IoArgoprojWorkflowV1alpha1Prometheus( + counter=IoArgoprojWorkflowV1alpha1Counter( + value="value_example", + ), + gauge=IoArgoprojWorkflowV1alpha1Gauge( + realtime=True, + value="value_example", + ), + help="help_example", + histogram=IoArgoprojWorkflowV1alpha1Histogram( + buckets=[ + 3.14, + ], + value="value_example", + ), + labels=[ + IoArgoprojWorkflowV1alpha1MetricLabel( + key="key_example", + value="value_example", + ), + ], + name="name_example", + when="when_example", + ), + ], + ), + name="name_example", + node_selector={ + "key": "key_example", + }, + outputs=IoArgoprojWorkflowV1alpha1Outputs( + artifacts=[ + IoArgoprojWorkflowV1alpha1Artifact( + archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( + _none={}, + tar=IoArgoprojWorkflowV1alpha1TarStrategy( + compression_level=1, + ), + zip={}, + ), + archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), + 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + url="url_example", + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, + _from="_from_example", + from_expression="from_expression_example", + gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( + bucket="bucket_example", + key="key_example", + service_account_key_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -24104,6 +29573,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -24137,12 +29607,64 @@ with argo_workflows.ApiClient() as api_client: name="name_example", optional=True, ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + krb_realm="krb_realm_example", + krb_service_principal_name="krb_service_principal_name_example", + krb_username="krb_username_example", + path="path_example", + ), + http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + 
name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -24215,6 +29737,7 @@ with argo_workflows.ApiClient() as api_client: sub_path="sub_path_example", ), ], + exit_code="exit_code_example", parameters=[ IoArgoprojWorkflowV1alpha1Parameter( default="default_example", @@ -24242,61 +29765,22 @@ with argo_workflows.ApiClient() as api_client: ), ), ], + result="result_example", ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - 
value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), + parallelism=1, + plugin={}, + pod_spec_patch="pod_spec_patch_example", + priority=1, + priority_class_name="priority_class_name_example", + resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( + action="action_example", + failure_condition="failure_condition_example", + flags=[ + "flags_example", ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( + manifest="manifest_example", + manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( + artifact=IoArgoprojWorkflowV1alpha1Artifact( archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( _none={}, tar=IoArgoprojWorkflowV1alpha1TarStrategy( @@ -24305,6 +29789,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -24318,6 +29814,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -24330,6 +29838,7 @@ with argo_workflows.ApiClient() as api_client: ), ), 
git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -24343,6 +29852,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -24382,6 +29892,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -24453,49 +30015,7 @@ with argo_workflows.ApiClient() as api_client: ), sub_path="sub_path_example", ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - 
name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", + ), merge_strategy="merge_strategy_example", set_owner_reference=True, success_condition="success_condition_example", @@ -25105,6 +30625,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -25118,6 +30650,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -25130,6 +30674,7 @@ with argo_workflows.ApiClient() as api_client: ), ), 
git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -25143,6 +30688,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -25182,6 +30728,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -25299,6 +30897,18 @@ with argo_workflows.ApiClient() as api_client: zip={}, ), archive_logs=True, + artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( + pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), + 
service_account_name="service_account_name_example", + strategy="strategy_example", + ), artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( password_secret=SecretKeySelector( key="key_example", @@ -25312,6 +30922,18 @@ with argo_workflows.ApiClient() as api_client: optional=True, ), ), + azure=IoArgoprojWorkflowV1alpha1AzureArtifact( + account_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + blob="blob_example", + container="container_example", + endpoint="endpoint_example", + use_sdk_creds=True, + ), + deleted=True, _from="_from_example", from_expression="from_expression_example", gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( @@ -25324,6 +30946,7 @@ with argo_workflows.ApiClient() as api_client: ), ), git=IoArgoprojWorkflowV1alpha1GitArtifact( + branch="branch_example", depth=1, disable_submodules=True, fetch=[ @@ -25337,6 +30960,7 @@ with argo_workflows.ApiClient() as api_client: ), repo="repo_example", revision="revision_example", + single_branch=True, ssh_private_key_secret=SecretKeySelector( key="key_example", name="name_example", @@ -25376,6 +31000,58 @@ with argo_workflows.ApiClient() as api_client: path="path_example", ), http=IoArgoprojWorkflowV1alpha1HTTPArtifact( + auth=IoArgoprojWorkflowV1alpha1HTTPAuth( + basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( + password_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( + client_cert_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + client_key_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( + client_id_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + 
client_secret_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + endpoint_params=[ + IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( + key="key_example", + value="value_example", + ), + ], + scopes=[ + "scopes_example", + ], + token_url_secret=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), headers=[ IoArgoprojWorkflowV1alpha1Header( name="name_example", @@ -26450,7 +32126,7 @@ Name | Type | Description | Notes ### Authorization -No authorization required +[BearerToken](../README.md#BearerToken) ### HTTP request headers diff --git a/sdks/python/examples/hello-world-from-object.py b/sdks/python/examples/hello-world-from-object.py deleted file mode 100644 index 7abfd2956a26..000000000000 --- a/sdks/python/examples/hello-world-from-object.py +++ /dev/null @@ -1,33 +0,0 @@ -from pprint import pprint - -import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.container import Container -from argo_workflows.model.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_create_request import \ - IoArgoprojWorkflowV1alpha1WorkflowCreateRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_spec import \ - IoArgoprojWorkflowV1alpha1WorkflowSpec -from argo_workflows.model.object_meta import ObjectMeta - -configuration = argo_workflows.Configuration(host="https://127.0.0.1:2746") -configuration.verify_ssl = False - -manifest = IoArgoprojWorkflowV1alpha1Workflow( - metadata=ObjectMeta(generate_name='hello-world-'), - spec=IoArgoprojWorkflowV1alpha1WorkflowSpec( - entrypoint='whalesay', - templates=[ - IoArgoprojWorkflowV1alpha1Template( - name='whalesay', - container=Container( - image='docker/whalesay:latest', command=['cowsay'], 
args=['hello world']))])) - -api_client = argo_workflows.ApiClient(configuration) -api_instance = workflow_service_api.WorkflowServiceApi(api_client=api_client) -api_response = api_instance.create_workflow( - namespace='argo', - body=IoArgoprojWorkflowV1alpha1WorkflowCreateRequest(workflow=manifest), - _check_return_type=False) -pprint(api_response) diff --git a/sdks/python/examples/hello-world-from-raw-yaml.py b/sdks/python/examples/hello-world-from-raw-yaml.py deleted file mode 100644 index e5d1746a77b0..000000000000 --- a/sdks/python/examples/hello-world-from-raw-yaml.py +++ /dev/null @@ -1,22 +0,0 @@ -from pprint import pprint - -import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_create_request import \ - IoArgoprojWorkflowV1alpha1WorkflowCreateRequest -import requests -import yaml - -configuration = argo_workflows.Configuration(host="https://127.0.0.1:2746") -configuration.verify_ssl = False - -resp = requests.get('https://raw.githubusercontent.com/argoproj/argo-workflows/master/examples/hello-world.yaml') -manifest = yaml.safe_load(resp.text) - -api_client = argo_workflows.ApiClient(configuration) -api_instance = workflow_service_api.WorkflowServiceApi(api_client) -api_response = api_instance.create_workflow( - namespace='argo', - body=IoArgoprojWorkflowV1alpha1WorkflowCreateRequest(workflow=manifest, _check_type=False), - _check_return_type=False) -pprint(api_response) diff --git a/sdks/python/sdk_version.py b/sdks/python/sdk_version.py index f513dbe28c22..76781f564846 100755 --- a/sdks/python/sdk_version.py +++ b/sdks/python/sdk_version.py @@ -26,7 +26,7 @@ print(UNTAGGED_VERSION) # this goes to sys.stdout, so it's captured by the Makefile exit(0) - rc_version_suffix = re.findall("-rc\d+", git_tag) + rc_version_suffix = re.findall("-.*", git_tag) if len(rc_version_suffix) > 0: git_tag = git_tag.replace(rc_version_suffix[0], '') version_digits = [int(i) for i in 
git_tag.replace(VERSION_PREFIX, '').split('.')] diff --git a/sdks/python/tests/client_unittest.py b/sdks/python/tests/client_unittest.py new file mode 100644 index 000000000000..0ba5748f66d2 --- /dev/null +++ b/sdks/python/tests/client_unittest.py @@ -0,0 +1,43 @@ +import os +import unittest +from pprint import pprint + +import argo_workflows +from argo_workflows.api import workflow_service_api +from argo_workflows.model.container import Container +from argo_workflows.model.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template +from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_create_request import \ + IoArgoprojWorkflowV1alpha1WorkflowCreateRequest +from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_spec import \ + IoArgoprojWorkflowV1alpha1WorkflowSpec +from argo_workflows.model.object_meta import ObjectMeta + +configuration = argo_workflows.Configuration(host="http://127.0.0.1:2746") +configuration.api_key['BearerToken'] = os.getenv("ARGO_TOKEN") + + +class ClientTest(unittest.TestCase): + + def test_create_workflow(self): + manifest = IoArgoprojWorkflowV1alpha1Workflow( + metadata=ObjectMeta(generate_name='hello-world-'), + spec=IoArgoprojWorkflowV1alpha1WorkflowSpec( + entrypoint='whalesay', + templates=[ + IoArgoprojWorkflowV1alpha1Template( + name='whalesay', + container=Container( + image='docker/whalesay:latest', command=['cowsay'], args=['hello world']))])) + + api_client = argo_workflows.ApiClient(configuration) + api_instance = workflow_service_api.WorkflowServiceApi(api_client=api_client) + api_response = api_instance.create_workflow( + namespace='argo', + body=IoArgoprojWorkflowV1alpha1WorkflowCreateRequest(workflow=manifest), + _check_return_type=False) + pprint(api_response) + + +if __name__ == '__main__': + unittest.main() diff --git a/server/apiserver/accesslog/interceptor.go 
b/server/apiserver/accesslog/interceptor.go new file mode 100644 index 000000000000..66a19ffdba08 --- /dev/null +++ b/server/apiserver/accesslog/interceptor.go @@ -0,0 +1,30 @@ +package accesslog + +import ( + "net/http" + "time" + + log "github.com/sirupsen/logrus" +) + +// Interceptor returns a handler that provides access logging. +// +// github.com/gorilla/handlers/logging.go +// https://arunvelsriram.medium.com/simple-golang-http-logging-middleware-315656ff8722 +func Interceptor(h http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + t := time.Now() + + rcw := &resultCapturingWriter{ResponseWriter: w} + + h.ServeHTTP(rcw, r) + + log.WithFields(log.Fields{ + "path": r.URL.Path, // log the path not the URL, to avoid logging sensitive data that could be in the query params + "method": r.Method, // log the method, so we can differentiate create/update from get/list + "status": rcw.status, + "size": rcw.size, + "duration": time.Since(t), + }).Info() + }) +} diff --git a/server/apiserver/accesslog/result_capturing_writer.go b/server/apiserver/accesslog/result_capturing_writer.go new file mode 100644 index 000000000000..ec448cb575d9 --- /dev/null +++ b/server/apiserver/accesslog/result_capturing_writer.go @@ -0,0 +1,29 @@ +package accesslog + +import ( + "net/http" +) + +// resultCapturingWriter captures the size and status code of the response. +// Because http.response implements http.Flusher, we must do so too, otherwise Watch* methods don't work. +// We do not implement http.Hijacker, as HTTP/2 requests should not allow it. 
+type resultCapturingWriter struct { + http.ResponseWriter // MUST also be http.Flusher + status int + size int +} + +func (r *resultCapturingWriter) Write(b []byte) (int, error) { + size, err := r.ResponseWriter.Write(b) + r.size += size + return size, err +} + +func (r *resultCapturingWriter) WriteHeader(v int) { + r.ResponseWriter.WriteHeader(v) + r.status = v +} + +func (r *resultCapturingWriter) Flush() { + r.ResponseWriter.(http.Flusher).Flush() +} diff --git a/server/apiserver/argoserver.go b/server/apiserver/argoserver.go index 618c0b7473de..2abeaed13f0c 100644 --- a/server/apiserver/argoserver.go +++ b/server/apiserver/argoserver.go @@ -34,12 +34,12 @@ import ( eventpkg "github.com/argoproj/argo-workflows/v3/pkg/apiclient/event" eventsourcepkg "github.com/argoproj/argo-workflows/v3/pkg/apiclient/eventsource" infopkg "github.com/argoproj/argo-workflows/v3/pkg/apiclient/info" - pipelinepkg "github.com/argoproj/argo-workflows/v3/pkg/apiclient/pipeline" sensorpkg "github.com/argoproj/argo-workflows/v3/pkg/apiclient/sensor" workflowpkg "github.com/argoproj/argo-workflows/v3/pkg/apiclient/workflow" workflowarchivepkg "github.com/argoproj/argo-workflows/v3/pkg/apiclient/workflowarchive" workflowtemplatepkg "github.com/argoproj/argo-workflows/v3/pkg/apiclient/workflowtemplate" "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + "github.com/argoproj/argo-workflows/v3/server/apiserver/accesslog" "github.com/argoproj/argo-workflows/v3/server/artifacts" "github.com/argoproj/argo-workflows/v3/server/auth" "github.com/argoproj/argo-workflows/v3/server/auth/sso" @@ -50,7 +50,6 @@ import ( "github.com/argoproj/argo-workflows/v3/server/event" "github.com/argoproj/argo-workflows/v3/server/eventsource" "github.com/argoproj/argo-workflows/v3/server/info" - pipeline "github.com/argoproj/argo-workflows/v3/server/pipeline" "github.com/argoproj/argo-workflows/v3/server/sensor" "github.com/argoproj/argo-workflows/v3/server/static" 
"github.com/argoproj/argo-workflows/v3/server/types" @@ -63,6 +62,10 @@ import ( "github.com/argoproj/argo-workflows/v3/workflow/artifactrepositories" "github.com/argoproj/argo-workflows/v3/workflow/events" "github.com/argoproj/argo-workflows/v3/workflow/hydrator" + + limiter "github.com/sethvargo/go-limiter" + "github.com/sethvargo/go-limiter/httplimit" + "github.com/sethvargo/go-limiter/memorystore" ) var MaxGRPCMessageSize int @@ -84,6 +87,8 @@ type argoServer struct { eventAsyncDispatch bool xframeOptions string accessControlAllowOrigin string + apiRateLimiter limiter.Store + allowedLinkProtocol []string cache *cache.ResourceCache } @@ -98,13 +103,15 @@ type ArgoServerOpts struct { // config map name ConfigName string ManagedNamespace string - SSONameSpace string + SSONamespace string HSTS bool EventOperationQueueSize int EventWorkerCount int EventAsyncDispatch bool XFrameOptions string AccessControlAllowOrigin string + APIRateLimit uint64 + AllowedLinkProtocol []string } func init() { @@ -116,14 +123,14 @@ func init() { } func getResourceCacheNamespace(opts ArgoServerOpts) string { - if opts.Namespaced { - return opts.SSONameSpace + if opts.ManagedNamespace != "" { + return opts.ManagedNamespace } return v1.NamespaceAll } func NewArgoServer(ctx context.Context, opts ArgoServerOpts) (*argoServer, error) { - configController := config.NewController(opts.Namespace, opts.ConfigName, opts.Clients.Kubernetes, emptyConfigFunc) + configController := config.NewController(opts.Namespace, opts.ConfigName, opts.Clients.Kubernetes) var resourceCache *cache.ResourceCache = nil ssoIf := sso.NullSSO if opts.AuthModes[auth.SSO] { @@ -131,19 +138,28 @@ func NewArgoServer(ctx context.Context, opts ArgoServerOpts) (*argoServer, error if err != nil { return nil, err } - ssoIf, err = sso.New(c.(*Config).SSO, opts.Clients.Kubernetes.CoreV1().Secrets(opts.Namespace), opts.BaseHRef, opts.TLSConfig != nil) + ssoIf, err = sso.New(c.SSO, 
opts.Clients.Kubernetes.CoreV1().Secrets(opts.Namespace), opts.BaseHRef, opts.TLSConfig != nil) if err != nil { return nil, err } - resourceCache = cache.NewResourceCache(opts.Clients.Kubernetes, ctx, getResourceCacheNamespace(opts)) + resourceCache = cache.NewResourceCache(opts.Clients.Kubernetes, getResourceCacheNamespace(opts)) + resourceCache.Run(ctx.Done()) log.Info("SSO enabled") } else { log.Info("SSO disabled") } - gatekeeper, err := auth.NewGatekeeper(opts.AuthModes, opts.Clients, opts.RestConfig, ssoIf, auth.DefaultClientForAuthorization, opts.Namespace, opts.SSONameSpace, opts.Namespaced, resourceCache) + gatekeeper, err := auth.NewGatekeeper(opts.AuthModes, opts.Clients, opts.RestConfig, ssoIf, auth.DefaultClientForAuthorization, opts.Namespace, opts.SSONamespace, opts.Namespaced, resourceCache) if err != nil { return nil, err } + store, err := memorystore.New(&memorystore.Config{ + Tokens: opts.APIRateLimit, + Interval: time.Second, + }) + if err != nil { + log.Fatal(err) + } + return &argoServer{ baseHRef: opts.BaseHRef, tlsConfig: opts.TLSConfig, @@ -160,6 +176,8 @@ func NewArgoServer(ctx context.Context, opts ArgoServerOpts) (*argoServer, error eventAsyncDispatch: opts.EventAsyncDispatch, xframeOptions: opts.XFrameOptions, accessControlAllowOrigin: opts.AccessControlAllowOrigin, + apiRateLimiter: store, + allowedLinkProtocol: opts.AllowedLinkProtocol, cache: resourceCache, }, nil } @@ -172,11 +190,14 @@ var backoff = wait.Backoff{ } func (as *argoServer) Run(ctx context.Context, port int, browserOpenFunc func(string)) { - v, err := as.configController.Get(ctx) + config, err := as.configController.Get(ctx) + if err != nil { + log.Fatal(err) + } + err = config.Sanitize(as.allowedLinkProtocol) if err != nil { log.Fatal(err) } - config := v.(*Config) log.WithFields(log.Fields{"version": argo.GetVersion().Version, "instanceID": config.InstanceID}).Info("Starting Argo Server") instanceIDService := instanceid.NewService(config.InstanceID) offloadRepo := 
sqldb.ExplosiveOffloadNodeStatusRepo @@ -230,7 +251,6 @@ func (as *argoServer) Run(ctx context.Context, port int, browserOpenFunc func(st httpL := tcpm.Match(cmux.HTTP1Fast()) grpcL := tcpm.Match(cmux.Any()) - go as.configController.Run(as.stopCh, as.restartOnConfigChange) go eventServer.Run(as.stopCh) go func() { as.checkServeErr("grpcServer", grpcServer.Serve(grpcL)) }() go func() { as.checkServeErr("httpServer", httpServer.Serve(httpL)) }() @@ -267,6 +287,7 @@ func (as *argoServer) newGRPCServer(instanceIDService instanceid.Service, offloa grpcutil.PanicLoggerUnaryServerInterceptor(serverLog), grpcutil.ErrorTranslationUnaryServerInterceptor, as.gatekeeper.UnaryServerInterceptor(), + grpcutil.RatelimitUnaryServerInterceptor(as.apiRateLimiter), )), grpc.StreamInterceptor(grpc_middleware.ChainStreamServer( grpc_prometheus.StreamServerInterceptor, @@ -274,6 +295,7 @@ func (as *argoServer) newGRPCServer(instanceIDService instanceid.Service, offloa grpcutil.PanicLoggerStreamServerInterceptor(serverLog), grpcutil.ErrorTranslationStreamServerInterceptor, as.gatekeeper.StreamServerInterceptor(), + grpcutil.RatelimitStreamServerInterceptor(as.apiRateLimiter), )), } @@ -282,7 +304,6 @@ func (as *argoServer) newGRPCServer(instanceIDService instanceid.Service, offloa infopkg.RegisterInfoServiceServer(grpcServer, info.NewInfoServer(as.managedNamespace, links, navColor)) eventpkg.RegisterEventServiceServer(grpcServer, eventServer) eventsourcepkg.RegisterEventSourceServiceServer(grpcServer, eventsource.NewEventSourceServer()) - pipelinepkg.RegisterPipelineServiceServer(grpcServer, pipeline.NewPipelineServer()) sensorpkg.RegisterSensorServiceServer(grpcServer, sensor.NewSensorServer()) workflowpkg.RegisterWorkflowServiceServer(grpcServer, workflow.NewWorkflowServer(instanceIDService, offloadNodeStatusRepo)) workflowtemplatepkg.RegisterWorkflowTemplateServiceServer(grpcServer, workflowtemplate.NewWorkflowTemplateServer(instanceIDService)) @@ -298,10 +319,15 @@ func (as 
*argoServer) newGRPCServer(instanceIDService instanceid.Service, offloa func (as *argoServer) newHTTPServer(ctx context.Context, port int, artifactServer *artifacts.ArtifactServer) *http.Server { endpoint := fmt.Sprintf("localhost:%d", port) + ratelimit_middleware, err := httplimit.NewMiddleware(as.apiRateLimiter, httplimit.IPKeyFunc()) + if err != nil { + log.Fatal(err) + } + mux := http.NewServeMux() httpServer := http.Server{ Addr: endpoint, - Handler: mux, + Handler: ratelimit_middleware.Handle(accesslog.Interceptor(mux)), TLSConfig: as.tlsConfig, } dialOpts := []grpc.DialOption{ @@ -330,7 +356,6 @@ func (as *argoServer) newHTTPServer(ctx context.Context, port int, artifactServe mustRegisterGWHandler(eventpkg.RegisterEventServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts) mustRegisterGWHandler(eventsourcepkg.RegisterEventSourceServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts) mustRegisterGWHandler(sensorpkg.RegisterSensorServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts) - mustRegisterGWHandler(pipelinepkg.RegisterPipelineServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts) mustRegisterGWHandler(workflowpkg.RegisterWorkflowServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts) mustRegisterGWHandler(workflowtemplatepkg.RegisterWorkflowTemplateServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts) mustRegisterGWHandler(cronworkflowpkg.RegisterCronWorkflowServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts) @@ -342,10 +367,15 @@ func (as *argoServer) newHTTPServer(ctx context.Context, port int, artifactServe r.Header.Del("Connection") webhookInterceptor(w, r, gwmux) }) - mux.HandleFunc("/artifacts/", artifactServer.GetOutputArtifact) - mux.HandleFunc("/input-artifacts/", artifactServer.GetInputArtifact) - mux.HandleFunc("/artifacts-by-uid/", artifactServer.GetOutputArtifactByUID) - mux.HandleFunc("/input-artifacts-by-uid/", artifactServer.GetInputArtifactByUID) + + // emergency environment variable that allows 
you to disable the artifact service in case of problems + if os.Getenv("ARGO_ARTIFACT_SERVER") != "false" { + mux.HandleFunc("/artifacts/", artifactServer.GetOutputArtifact) + mux.HandleFunc("/input-artifacts/", artifactServer.GetInputArtifact) + mux.HandleFunc("/artifacts-by-uid/", artifactServer.GetOutputArtifactByUID) + mux.HandleFunc("/input-artifacts-by-uid/", artifactServer.GetInputArtifactByUID) + mux.HandleFunc("/artifact-files/", artifactServer.GetArtifactFile) + } mux.Handle("/oauth2/redirect", handlers.ProxyHeaders(http.HandlerFunc(as.oAuth2Service.HandleRedirect))) mux.Handle("/oauth2/callback", handlers.ProxyHeaders(http.HandlerFunc(as.oAuth2Service.HandleCallback))) mux.HandleFunc("/metrics", func(w http.ResponseWriter, r *http.Request) { @@ -376,15 +406,6 @@ func mustRegisterGWHandler(register registerFunc, ctx context.Context, mux *runt } } -// Unlike the controller, the server creates object based on the config map at init time, and will not pick-up on -// changes unless we restart. -// Instead of opting to re-write the server, instead we'll just listen for any old change and restart. 
-func (as *argoServer) restartOnConfigChange(interface{}) error { - log.Info("config map event, exiting gracefully") - as.stopCh <- struct{}{} - return nil -} - // checkServeErr checks the error from a .Serve() call to decide if it was a graceful shutdown func (as *argoServer) checkServeErr(name string, err error) { if err != nil { diff --git a/server/apiserver/config.go b/server/apiserver/config.go deleted file mode 100644 index 62c63f1e20fc..000000000000 --- a/server/apiserver/config.go +++ /dev/null @@ -1,14 +0,0 @@ -package apiserver - -import ( - "github.com/argoproj/argo-workflows/v3/config" - "github.com/argoproj/argo-workflows/v3/server/auth/sso" -) - -var emptyConfigFunc = func() interface{} { return &Config{} } - -type Config struct { - config.Config - // SSO in settings for single-sign on - SSO sso.Config `json:"sso,omitempty"` -} diff --git a/server/artifacts/artifact_server.go b/server/artifacts/artifact_server.go index cd4369fa5132..fb6fee3b0629 100644 --- a/server/artifacts/artifact_server.go +++ b/server/artifacts/artifact_server.go @@ -4,21 +4,21 @@ import ( "context" "errors" "fmt" - "io/ioutil" + "io" + "mime" "net/http" - "os" "path" - "path/filepath" - "strconv" "strings" - "time" log "github.com/sirupsen/logrus" "google.golang.org/grpc/codes" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + apierr "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/utils/env" + argoerrors "github.com/argoproj/argo-workflows/v3/errors" "github.com/argoproj/argo-workflows/v3/persist/sqldb" wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" "github.com/argoproj/argo-workflows/v3/server/auth" @@ -26,7 +26,9 @@ import ( "github.com/argoproj/argo-workflows/v3/util/instanceid" "github.com/argoproj/argo-workflows/v3/workflow/artifactrepositories" artifact "github.com/argoproj/argo-workflows/v3/workflow/artifacts" + "github.com/argoproj/argo-workflows/v3/workflow/artifacts/common" 
"github.com/argoproj/argo-workflows/v3/workflow/hydrator" + "github.com/argoproj/argo-workflows/v3/workflow/util" ) type ArtifactServer struct { @@ -54,10 +56,182 @@ func (a *ArtifactServer) GetInputArtifact(w http.ResponseWriter, r *http.Request a.getArtifact(w, r, true) } +// single endpoint to be able to handle serving directories as well as files, both those that have been archived and those that haven't +// Valid requests: +// /artifact-files/{namespace}/[archived-workflows|workflows]/{id}/{nodeId}/outputs/{artifactName} +// /artifact-files/{namespace}/[archived-workflows|workflows]/{id}/{nodeId}/outputs/{artifactName}/{fileName} +// /artifact-files/{namespace}/[archived-workflows|workflows]/{id}/{nodeId}/outputs/{artifactName}/{fileDir}/.../{fileName} +// 'id' field represents 'uid' for archived workflows and 'name' for non-archived +func (a *ArtifactServer) GetArtifactFile(w http.ResponseWriter, r *http.Request) { + + const ( + namespaceIndex = 2 + archiveDiscrimIndex = 3 + idIndex = 4 + nodeIdIndex = 5 + directionIndex = 6 + artifactNameIndex = 7 + fileNameFirstIndex = 8 + ) + + var fileName *string + requestPath := strings.Split(r.URL.Path, "/") + if len(requestPath) >= fileNameFirstIndex+1 { // they included a file path in the URL (not just artifact name) + joined := strings.Join(requestPath[fileNameFirstIndex:], "/") + // sanitize file name + cleanedPath := path.Clean(joined) + fileName = &cleanedPath + } else if len(requestPath) < artifactNameIndex+1 { + a.httpBadRequestError(w) + return + } + + namespace := requestPath[namespaceIndex] + archiveDiscriminator := requestPath[archiveDiscrimIndex] + id := requestPath[idIndex] // if archiveDiscriminator == "archived-workflows", this represents workflow UID; if archiveDiscriminator == "workflows", this represents workflow name + nodeId := requestPath[nodeIdIndex] + direction := requestPath[directionIndex] + artifactName := requestPath[artifactNameIndex] + + if direction != "outputs" { // for now we just 
handle output artifacts + a.httpBadRequestError(w) + return + } + + // verify user is authorized + ctx, err := a.gateKeeping(r, types.NamespaceHolder(namespace)) + if err != nil { + a.unauthorizedError(w) + return + } + + var wf *wfv1.Workflow + + // retrieve the Workflow + switch archiveDiscriminator { + case "workflows": + workflowName := id + log.WithFields(log.Fields{"namespace": namespace, "workflowName": workflowName, "nodeId": nodeId, "artifactName": artifactName}).Info("Get artifact file") + + wf, err = a.getWorkflowAndValidate(ctx, namespace, workflowName) + if err != nil { + a.serverInternalError(err, w) + return + } + case "archived-workflows": + uid := id + log.WithFields(log.Fields{"namespace": namespace, "uid": uid, "nodeId": nodeId, "artifactName": artifactName}).Info("Get artifact file") + + wf, err = a.wfArchive.GetWorkflow(uid) + if err != nil { + a.serverInternalError(err, w) + return + } + + // check that the namespace passed in matches this workflow's namespace + if wf.GetNamespace() != namespace { + a.httpBadRequestError(w) + return + } + + // return 401 if the client does not have permission to get wf + err = a.validateAccess(ctx, wf) + if err != nil { + a.unauthorizedError(w) + return + } + default: + a.httpBadRequestError(w) + return + } + + artifact, driver, err := a.getArtifactAndDriver(ctx, nodeId, artifactName, false, wf, fileName) + if err != nil { + a.serverInternalError(err, w) + return + } + + isDir := strings.HasSuffix(r.URL.Path, "/") + + if !isDir { + isDir, err := driver.IsDirectory(artifact) + if err != nil { + if !argoerrors.IsCode(argoerrors.CodeNotImplemented, err) { + a.serverInternalError(err, w) + return + } + } + if isDir { + http.Redirect(w, r, r.URL.String()+"/", http.StatusTemporaryRedirect) + return + } + } + + if isDir { + // return an html page to the user + + objects, err := driver.ListObjects(artifact) + if err != nil { + a.httpFromError(err, w) + return + } + log.Debugf("this is a directory, artifact: %+v; 
files: %v", artifact, objects) + + key, _ := artifact.GetKey() + for _, object := range objects { + + // object is prefixed by the key, we must trim it + dir, file := path.Split(strings.TrimPrefix(object, key+"/")) + + // if dir is empty string, we are in the root dir + // we found in index.html, abort and redirect there + if dir == "" && file == "index.html" { + w.Header().Set("Location", r.URL.String()+"index.html") + w.WriteHeader(http.StatusTemporaryRedirect) + return + } + } + + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte("
    \n")) + + dirs := map[string]bool{} // to de-dupe sub-dirs + + _, _ = w.Write([]byte(fmt.Sprintf("
  • %s
  • \n", "..", ".."))) + + for _, object := range objects { + + // object is prefixed the key, we must trim it + dir, file := path.Split(strings.TrimPrefix(object, key+"/")) + + // if dir is empty string, we are in the root dir + if dir == "" { + _, _ = w.Write([]byte(fmt.Sprintf("
  • %s
  • \n", file, file))) + } else if dirs[dir] { + continue + } else { + _, _ = w.Write([]byte(fmt.Sprintf("
  • %s
  • \n", dir, dir))) + dirs[dir] = true + } + } + _, _ = w.Write([]byte("
")) + + } else { // stream the file itself + log.Debugf("not a directory, artifact: %+v", artifact) + + err = a.returnArtifact(w, artifact, driver) + + if err != nil { + a.httpFromError(err, w) + } + } + +} + func (a *ArtifactServer) getArtifact(w http.ResponseWriter, r *http.Request, isInput bool) { requestPath := strings.SplitN(r.URL.Path, "/", 6) if len(requestPath) != 6 { - a.serverInternalError(errors.New("request path is not valid"), w) + a.httpBadRequestError(w) return } namespace := requestPath[2] @@ -67,22 +241,27 @@ func (a *ArtifactServer) getArtifact(w http.ResponseWriter, r *http.Request, isI ctx, err := a.gateKeeping(r, types.NamespaceHolder(namespace)) if err != nil { - a.unauthorizedError(err, w) + a.unauthorizedError(w) return } log.WithFields(log.Fields{"namespace": namespace, "workflowName": workflowName, "nodeId": nodeId, "artifactName": artifactName, "isInput": isInput}).Info("Download artifact") wf, err := a.getWorkflowAndValidate(ctx, namespace, workflowName) + if err != nil { + a.httpFromError(err, w) + return + } + art, driver, err := a.getArtifactAndDriver(ctx, nodeId, artifactName, isInput, wf, nil) if err != nil { a.serverInternalError(err, w) return } - err = a.returnArtifact(ctx, w, r, wf, nodeId, artifactName, isInput) + err = a.returnArtifact(w, art, driver) if err != nil { - a.serverInternalError(err, w) + a.httpFromError(err, w) return } } @@ -98,7 +277,7 @@ func (a *ArtifactServer) GetInputArtifactByUID(w http.ResponseWriter, r *http.Re func (a *ArtifactServer) getArtifactByUID(w http.ResponseWriter, r *http.Request, isInput bool) { requestPath := strings.SplitN(r.URL.Path, "/", 5) if len(requestPath) != 5 { - a.serverInternalError(errors.New("request path is not valid"), w) + a.httpBadRequestError(w) return } uid := requestPath[2] @@ -108,28 +287,34 @@ func (a *ArtifactServer) getArtifactByUID(w http.ResponseWriter, r *http.Request // We need to know the namespace before we can do gate keeping wf, err := 
a.wfArchive.GetWorkflow(uid) if err != nil { - a.serverInternalError(err, w) + a.httpFromError(err, w) return } ctx, err := a.gateKeeping(r, types.NamespaceHolder(wf.GetNamespace())) if err != nil { - a.unauthorizedError(err, w) + a.unauthorizedError(w) return } // return 401 if the client does not have permission to get wf err = a.validateAccess(ctx, wf) if err != nil { - a.unauthorizedError(err, w) + a.unauthorizedError(w) + return + } + art, driver, err := a.getArtifactAndDriver(ctx, nodeId, artifactName, isInput, wf, nil) + if err != nil { + a.serverInternalError(err, w) return } log.WithFields(log.Fields{"uid": uid, "nodeId": nodeId, "artifactName": artifactName, "isInput": isInput}).Info("Download artifact") - err = a.returnArtifact(ctx, w, r, wf, nodeId, artifactName, isInput) + + err = a.returnArtifact(w, art, driver) if err != nil { - a.serverInternalError(err, w) + a.httpFromError(err, w) return } } @@ -150,17 +335,44 @@ func (a *ArtifactServer) gateKeeping(r *http.Request, ns types.NamespacedRequest return a.gatekeeper.ContextWithRequest(ctx, ns) } -func (a *ArtifactServer) unauthorizedError(err error, w http.ResponseWriter) { - w.WriteHeader(401) - _, _ = w.Write([]byte(err.Error())) +func (a *ArtifactServer) unauthorizedError(w http.ResponseWriter) { + http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized) } func (a *ArtifactServer) serverInternalError(err error, w http.ResponseWriter) { - w.WriteHeader(500) - _, _ = w.Write([]byte(err.Error())) + http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError) + log.WithError(err).Error("Artifact Server returned internal error") +} + +func (a *ArtifactServer) httpBadRequestError(w http.ResponseWriter) { + http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest) +} + +func (a *ArtifactServer) httpFromError(err error, w http.ResponseWriter) { + if err == nil { + return + } + statusCode := http.StatusInternalServerError + e := 
&apierr.StatusError{} + if errors.As(err, &e) { // check if it's a Kubernetes API error + // There is a http error code somewhere in the error stack + statusCode = int(e.Status().Code) + } else { + // check if it's an internal ArgoError + argoerr, typeOkay := err.(argoerrors.ArgoError) + if typeOkay { + statusCode = argoerr.HTTPCode() + } + } + + http.Error(w, http.StatusText(statusCode), statusCode) + if statusCode == http.StatusInternalServerError { + log.WithError(err).Error("Artifact Server returned internal error") + } } -func (a *ArtifactServer) returnArtifact(ctx context.Context, w http.ResponseWriter, r *http.Request, wf *wfv1.Workflow, nodeId, artifactName string, isInput bool) error { +func (a *ArtifactServer) getArtifactAndDriver(ctx context.Context, nodeId, artifactName string, isInput bool, wf *wfv1.Workflow, fileName *string) (*wfv1.Artifact, common.ArtifactDriver, error) { + kubeClient := auth.GetKubeClient(ctx) var art *wfv1.Artifact @@ -170,59 +382,77 @@ func (a *ArtifactServer) returnArtifact(ctx context.Context, w http.ResponseWrit art = wf.Status.Nodes[nodeId].Outputs.GetArtifactByName(artifactName) } if art == nil { - return fmt.Errorf("artifact not found") + return nil, nil, fmt.Errorf("artifact not found: %s, isInput=%t, Workflow Status=%+v", artifactName, isInput, wf.Status) } - ar, err := a.artifactRepositories.Get(ctx, wf.Status.ArtifactRepositoryRef) - if err != nil { - return err + // Artifact Location can be defined in various places: + // 1. In the Artifact itself + // 2. Defined by Controller configmap + // 3. Workflow spec defines artifactRepositoryRef which is a ConfigMap which defines the location + // 4. 
Template defines ArchiveLocation + + templateName := util.GetTemplateFromNode(wf.Status.Nodes[nodeId]) + template := wf.GetTemplateByName(templateName) + if template == nil { + return nil, nil, fmt.Errorf("no template found by the name of '%s' (which is the template associated with nodeId '%s'??", templateName, nodeId) } - l := ar.ToArtifactLocation() - err = art.Relocate(l) - if err != nil { - return err + + archiveLocation := template.ArchiveLocation // this is case 4 + if !archiveLocation.HasLocation() { + ar, err := a.artifactRepositories.Get(ctx, wf.Status.ArtifactRepositoryRef) // this should handle cases 2 and 3 + if err != nil { + return art, nil, err + } + archiveLocation = ar.ToArtifactLocation() } - driver, err := a.artDriverFactory(ctx, art, resources{kubeClient, wf.Namespace}) + err := art.Relocate(archiveLocation) // if the Artifact defines the location (case 1), it will be used; otherwise whatever archiveLocation is set to if err != nil { - return err + return art, nil, err } - tmp, err := ioutil.TempFile("/tmp", "artifact") - if err != nil { - return err + if fileName != nil { + err = art.AppendToKey(*fileName) + if err != nil { + return art, nil, fmt.Errorf("error appending filename %s to key of artifact %+v: err: %v", *fileName, art, err) + } + log.Debugf("appended key %s to artifact %+v", *fileName, art) } - tmpPath := tmp.Name() - defer func() { _ = os.Remove(tmpPath) }() - err = driver.Load(art, tmpPath) + driver, err := a.artDriverFactory(ctx, art, resources{kubeClient, wf.Namespace}) if err != nil { - return err + return art, nil, err } + log.Debugf("successfully located driver associated with artifact %+v", art) - file, err := os.Open(filepath.Clean(tmpPath)) + return art, driver, nil +} + +func (a *ArtifactServer) returnArtifact(w http.ResponseWriter, art *wfv1.Artifact, driver common.ArtifactDriver) error { + stream, err := driver.OpenStream(art) if err != nil { return err } defer func() { - if err := file.Close(); err != nil { - 
log.Fatalf("Error closing file[%s]: %v", tmpPath, err) + if err := stream.Close(); err != nil { + log.Warningf("Error closing stream[%s]: %v", stream, err) } }() - stats, err := file.Stat() - if err != nil { - return err - } - - contentLength := strconv.FormatInt(stats.Size(), 10) - log.WithFields(log.Fields{"size": contentLength}).Debug("Artifact file size") - key, _ := art.GetKey() w.Header().Add("Content-Disposition", fmt.Sprintf(`filename="%s"`, path.Base(key))) - w.WriteHeader(200) + w.Header().Add("Content-Type", mime.TypeByExtension(path.Ext(key))) + w.Header().Add("Content-Security-Policy", env.GetString("ARGO_ARTIFACT_CONTENT_SECURITY_POLICY", "sandbox; base-uri 'none'; default-src 'none'; img-src 'self'; style-src 'self' 'unsafe-inline'")) + w.Header().Add("X-Frame-Options", env.GetString("ARGO_ARTIFACT_X_FRAME_OPTIONS", "SAMEORIGIN")) - http.ServeContent(w, r, "", time.Time{}, file) + _, err = io.Copy(w, stream) + if err != nil { + errStr := fmt.Sprintf("failed to stream artifact: %v", err) + http.Error(w, errStr, http.StatusInternalServerError) + return errors.New(errStr) + } else { + w.WriteHeader(http.StatusOK) + } return nil } diff --git a/server/artifacts/artifact_server_test.go b/server/artifacts/artifact_server_test.go index 440c1103847a..686224bb417e 100644 --- a/server/artifacts/artifact_server_test.go +++ b/server/artifacts/artifact_server_test.go @@ -1,13 +1,22 @@ package artifacts import ( + "bytes" "context" + "errors" "fmt" + "io" "io/ioutil" "net/http" + "net/http/httptest" "net/url" + "strings" "testing" + apierr "k8s.io/apimachinery/pkg/api/errors" + + argoerrors "github.com/argoproj/argo-workflows/v3/errors" + "github.com/stretchr/testify/assert" testhttp "github.com/stretchr/testify/http" "github.com/stretchr/testify/mock" @@ -44,10 +53,102 @@ func (a *fakeArtifactDriver) Load(_ *wfv1.Artifact, path string) error { return ioutil.WriteFile(path, a.data, 0o600) } +var bucketsOfKeys = map[string][]string{ + "my-bucket": { + 
"my-wf/my-node-1/my-s3-input-artifact.tgz", + "my-wf/my-node-1/my-s3-artifact-directory", + "my-wf/my-node-1/my-s3-artifact-directory/a.txt", + "my-wf/my-node-1/my-s3-artifact-directory/subdirectory/b.txt", + "my-wf/my-node-1/my-gcs-artifact", + "my-wf/my-node-1/my-gcs-artifact.tgz", + "my-wf/my-node-1/my-oss-artifact.zip", + "my-wf/my-node-1/my-s3-artifact.tgz", + }, + "my-bucket-2": { + "my-wf/my-node-2/my-s3-artifact-bucket-2", + }, + "my-bucket-3": { + "my-wf/my-node-2/my-s3-artifact-bucket-3", + }, + "my-bucket-4": { + "my-wf/my-node-3/my-s3-artifact.tgz", + }, +} + +func (a *fakeArtifactDriver) OpenStream(artifact *wfv1.Artifact) (io.ReadCloser, error) { + //fmt.Printf("deletethis: artifact=%+v\n", artifact) + + key, err := artifact.ArtifactLocation.GetKey() + if err != nil { + return nil, err + } + if strings.HasSuffix(key, "deletedFile.txt") { + return nil, argoerrors.New(argoerrors.CodeNotFound, "file deleted") + } else if strings.HasSuffix(key, "somethingElseWentWrong.txt") { + return nil, errors.New("whatever") + } + + if artifact.S3 != nil { + // make sure it's a recognizable bucket/key + keysInBucket, found := bucketsOfKeys[artifact.S3.Bucket] + if !found { + return nil, fmt.Errorf("artifact bucket not found: %+v", artifact) + } + foundKey := false + for _, recognizableKey := range keysInBucket { + if key == recognizableKey { + foundKey = true + break + } + } + if !foundKey { + return nil, fmt.Errorf("artifact key '%s' not found in bucket '%s'", key, artifact.S3.Bucket) + } + } + + return io.NopCloser(bytes.NewReader(a.data)), nil +} + func (a *fakeArtifactDriver) Save(_ string, _ *wfv1.Artifact) error { return fmt.Errorf("not implemented") } +func (a *fakeArtifactDriver) IsDirectory(artifact *wfv1.Artifact) (bool, error) { + key, err := artifact.GetKey() + if err != nil { + return false, err + } + + if strings.HasSuffix(key, "my-gcs-artifact.tgz") { + return false, argoerrors.New(argoerrors.CodeNotImplemented, "IsDirectory currently unimplemented for 
GCS") + } + + return strings.HasSuffix(key, "my-s3-artifact-directory") || strings.HasSuffix(key, "my-s3-artifact-directory/"), nil +} + +func (a *fakeArtifactDriver) ListObjects(artifact *wfv1.Artifact) ([]string, error) { + key, err := artifact.GetKey() + if err != nil { + return nil, err + } + if artifact.Name == "my-s3-artifact-directory" { + if strings.HasSuffix(key, "subdirectory") { + return []string{ + "my-wf/my-node-1/my-s3-artifact-directory/subdirectory/b.txt", + "my-wf/my-node-1/my-s3-artifact-directory/subdirectory/c.txt", + }, nil + } else { + return []string{ + "my-wf/my-node-1/my-s3-artifact-directory/a.txt", + "my-wf/my-node-1/my-s3-artifact-directory/index.html", + "my-wf/my-node-1/my-s3-artifact-directory/subdirectory/b.txt", + "my-wf/my-node-1/my-s3-artifact-directory/subdirectory/c.txt", + }, nil + } + } + return []string{}, nil +} + func newServer() *ArtifactServer { gatekeeper := &authmocks.Gatekeeper{} kube := kubefake.NewSimpleClientset() @@ -56,16 +157,36 @@ func newServer() *ArtifactServer { ObjectMeta: metav1.ObjectMeta{Namespace: "my-ns", Name: "my-wf", Labels: map[string]string{ common.LabelKeyControllerInstanceID: instanceId, }}, + Spec: wfv1.WorkflowSpec{ + Templates: []wfv1.Template{ + { + Name: "template-1", + }, + { + Name: "template-2", + ArchiveLocation: &wfv1.ArtifactLocation{ + S3: &wfv1.S3Artifact{ + Key: "key-1", + S3Bucket: wfv1.S3Bucket{ + Bucket: "my-bucket-3", + Endpoint: "minio:9000", + }, + }, + }, + }, + }, + }, Status: wfv1.WorkflowStatus{ Nodes: wfv1.Nodes{ - "my-node": wfv1.NodeStatus{ + "my-node-1": wfv1.NodeStatus{ + TemplateName: "template-1", Inputs: &wfv1.Inputs{ Artifacts: wfv1.Artifacts{ { Name: "my-s3-input-artifact", ArtifactLocation: wfv1.ArtifactLocation{ S3: &wfv1.S3Artifact{ - Key: "my-wf/my-node/my-s3-input-artifact.tgz", + Key: "my-wf/my-node-1/my-s3-input-artifact.tgz", }, }, }, @@ -78,7 +199,16 @@ func newServer() *ArtifactServer { ArtifactLocation: wfv1.ArtifactLocation{ S3: &wfv1.S3Artifact{ // 
S3 is a configured artifact repo, so does not need key - Key: "my-wf/my-node/my-s3-artifact.tgz", + Key: "my-wf/my-node-1/my-s3-artifact.tgz", + }, + }, + }, + { + Name: "my-s3-artifact-directory", + ArtifactLocation: wfv1.ArtifactLocation{ + S3: &wfv1.S3Artifact{ + // S3 is a configured artifact repo, so does not need key + Key: "my-wf/my-node-1/my-s3-artifact-directory", }, }, }, @@ -90,7 +220,19 @@ func newServer() *ArtifactServer { GCSBucket: wfv1.GCSBucket{ Bucket: "my-bucket", }, - Key: "my-wf/my-node/my-gcs-artifact", + Key: "my-wf/my-node-1/my-gcs-artifact", + }, + }, + }, + { + Name: "my-gcs-artifact-file", + ArtifactLocation: wfv1.ArtifactLocation{ + GCS: &wfv1.GCSArtifact{ + // GCS is not a configured artifact repo, so must have bucket + GCSBucket: wfv1.GCSBucket{ + Bucket: "my-bucket", + }, + Key: "my-wf/my-node-1/my-gcs-artifact.tgz", }, }, }, @@ -102,7 +244,61 @@ func newServer() *ArtifactServer { OSSBucket: wfv1.OSSBucket{ Bucket: "my-bucket", }, - Key: "my-wf/my-node/my-oss-artifact.zip", + Key: "my-wf/my-node-1/my-oss-artifact.zip", + }, + }, + }, + }, + }, + }, + + "my-node-2": wfv1.NodeStatus{ + TemplateName: "template-2", + Outputs: &wfv1.Outputs{ + Artifacts: wfv1.Artifacts{ + { + Name: "my-s3-artifact-bucket-3", + ArtifactLocation: wfv1.ArtifactLocation{ + S3: &wfv1.S3Artifact{ + // S3 is a configured artifact repo, so does not need key + Key: "my-wf/my-node-2/my-s3-artifact-bucket-3", + }, + }, + }, + { + Name: "my-s3-artifact-bucket-2", + ArtifactLocation: wfv1.ArtifactLocation{ + S3: &wfv1.S3Artifact{ + // S3 is a configured artifact repo, so does not need key + Key: "my-wf/my-node-2/my-s3-artifact-bucket-2", + S3Bucket: wfv1.S3Bucket{ + Bucket: "my-bucket-2", + Endpoint: "minio:9000", + }, + }, + }, + }, + }, + }, + }, + + "my-node-3": wfv1.NodeStatus{ + TemplateRef: &wfv1.TemplateRef{ + Name: "my-template", + Template: "template-3", + }, + Outputs: &wfv1.Outputs{ + Artifacts: wfv1.Artifacts{ + { + Name: "my-s3-artifact", + 
ArtifactLocation: wfv1.ArtifactLocation{ + S3: &wfv1.S3Artifact{ + // S3 is a configured artifact repo, so does not need key + Key: "my-wf/my-node-3/my-s3-artifact.tgz", + S3Bucket: wfv1.S3Bucket{ + Bucket: "my-bucket-4", + Endpoint: "minio:9000", + }, }, }, }, @@ -112,6 +308,19 @@ func newServer() *ArtifactServer { // a node without input/output artifacts "my-node-no-artifacts": wfv1.NodeStatus{}, }, + StoredTemplates: map[string]wfv1.Template{ + "namespaced/my-template/template-3": { + Name: "template-3", + Outputs: wfv1.Outputs{ + Artifacts: wfv1.Artifacts{ + { + Name: "my-s3-artifact", + Path: "my-s3-artifact.tgz", + }, + }, + }, + }, + }, }, } argo := fakewfv1.NewSimpleClientset(wf, &wfv1.Workflow{ @@ -138,6 +347,107 @@ func newServer() *ArtifactServer { return newArtifactServer(gatekeeper, hydratorfake.Noop, a, instanceid.NewService(instanceId), fakeArtifactDriverFactory, artifactRepositories) } +func TestArtifactServer_GetArtifactFile(t *testing.T) { + s := newServer() + + tests := []struct { + path string + // expected results: + statusCode int + //redirect bool + location string + //success bool + isDirectory bool + directoryFiles []string // verify these files are in there, if this is a directory + }{ + { + path: "/artifact-files/my-ns/workflows/my-wf/my-node-1/outputs/my-s3-artifact-directory", + statusCode: 307, // redirect + location: "/artifact-files/my-ns/workflows/my-wf/my-node-1/outputs/my-s3-artifact-directory/", + }, + { + path: "/artifact-files/my-ns/workflows/my-wf/my-node-1/outputs/my-s3-artifact-directory/", + statusCode: 307, // redirect + location: "/artifact-files/my-ns/workflows/my-wf/my-node-1/outputs/my-s3-artifact-directory/index.html", + }, + { + path: "/artifact-files/my-ns/workflows/my-wf/my-node-1/outputs/my-s3-artifact-directory/subdirectory/", + statusCode: 200, + isDirectory: true, + directoryFiles: []string{ + "..", + "b.txt", + "c.txt", + }, + }, + { + path: 
"/artifact-files/my-ns/workflows/my-wf/my-node-1/outputs/my-s3-artifact-directory/a.txt", + statusCode: 200, + isDirectory: false, + }, + { + path: "/artifact-files/my-ns/workflows/my-wf/my-node-1/outputs/my-s3-artifact-directory/subdirectory/b.txt", + statusCode: 200, + isDirectory: false, + }, + { + path: "/artifact-files/my-ns/workflows/my-wf/my-node-1/outputs/my-s3-artifact-directory/deletedFile.txt", + statusCode: 404, + isDirectory: false, + }, + { + path: "/artifact-files/my-ns/workflows/my-wf/my-node-1/outputs/my-s3-artifact-directory/somethingElseWentWrong.txt", + statusCode: 500, + isDirectory: false, + }, + { + path: "/artifact-files/my-ns/workflows/my-wf/my-node-1/outputs/my-gcs-artifact-file/my-gcs-artifact.tgz", + statusCode: 200, + isDirectory: false, + }, + { + path: "/artifact-files/my-ns/workflows/my-wf/my-node-2/outputs/my-s3-artifact-bucket-3", + statusCode: 200, + isDirectory: false, + }, + { + path: "/artifact-files/my-ns/workflows/my-wf/my-node-2/outputs/my-s3-artifact-bucket-2", + statusCode: 200, + isDirectory: false, + }, + } + + for _, tt := range tests { + t.Run(tt.path, func(t *testing.T) { + r := &http.Request{} + r.URL = mustParse(tt.path) + recorder := httptest.NewRecorder() + + s.GetArtifactFile(recorder, r) + assert.Equal(t, tt.statusCode, recorder.Result().StatusCode) + if tt.statusCode >= 300 && tt.statusCode <= 399 { // redirect + assert.Equal(t, tt.location, recorder.Header().Get("Location")) + } else if tt.statusCode >= 200 && tt.statusCode <= 299 { // success + all, err := io.ReadAll(recorder.Result().Body) + if err != nil { + panic(fmt.Sprintf("failed to read http body: %v", err)) + } + if tt.isDirectory { + fmt.Printf("got directory listing:\n%s\n", all) + // verify that the files are contained in the listing we got back + assert.Equal(t, len(tt.directoryFiles), strings.Count(string(all), "
  • ")) + for _, file := range tt.directoryFiles { + assert.True(t, strings.Contains(string(all), file)) + } + } else { + assert.Equal(t, "my-data", string(all)) + } + + } + }) + } +} + func TestArtifactServer_GetOutputArtifact(t *testing.T) { s := newServer() @@ -162,12 +472,49 @@ func TestArtifactServer_GetOutputArtifact(t *testing.T) { for _, tt := range tests { t.Run(tt.artifactName, func(t *testing.T) { r := &http.Request{} - r.URL = mustParse(fmt.Sprintf("/artifacts/my-ns/my-wf/my-node/%s", tt.artifactName)) - w := &testhttp.TestResponseWriter{} - s.GetOutputArtifact(w, r) - if assert.Equal(t, 200, w.StatusCode) { - assert.Equal(t, fmt.Sprintf(`filename="%s"`, tt.fileName), w.Header().Get("Content-Disposition")) - assert.Equal(t, "my-data", w.Output) + r.URL = mustParse(fmt.Sprintf("/artifacts/my-ns/my-wf/my-node-1/%s", tt.artifactName)) + recorder := httptest.NewRecorder() + + s.GetOutputArtifact(recorder, r) + if assert.Equal(t, 200, recorder.Result().StatusCode) { + assert.Equal(t, fmt.Sprintf(`filename="%s"`, tt.fileName), recorder.Header().Get("Content-Disposition")) + all, err := io.ReadAll(recorder.Result().Body) + if err != nil { + panic(fmt.Sprintf("failed to read http body: %v", err)) + } + assert.Equal(t, "my-data", string(all)) + } + }) + } +} + +func TestArtifactServer_GetOutputArtifactWithTemplate(t *testing.T) { + s := newServer() + + tests := []struct { + fileName string + artifactName string + }{ + { + fileName: "my-s3-artifact.tgz", + artifactName: "my-s3-artifact", + }, + } + + for _, tt := range tests { + t.Run(tt.artifactName, func(t *testing.T) { + r := &http.Request{} + r.URL = mustParse(fmt.Sprintf("/artifacts/my-ns/my-wf/my-node-3/%s", tt.artifactName)) + recorder := httptest.NewRecorder() + + s.GetOutputArtifact(recorder, r) + if assert.Equal(t, 200, recorder.Result().StatusCode) { + assert.Equal(t, fmt.Sprintf(`filename="%s"`, tt.fileName), recorder.Header().Get("Content-Disposition")) + all, err := 
io.ReadAll(recorder.Result().Body) + if err != nil { + panic(fmt.Sprintf("failed to read http body: %v", err)) + } + assert.Equal(t, "my-data", string(all)) } }) } @@ -189,12 +536,16 @@ func TestArtifactServer_GetInputArtifact(t *testing.T) { for _, tt := range tests { t.Run(tt.artifactName, func(t *testing.T) { r := &http.Request{} - r.URL = mustParse(fmt.Sprintf("/input-artifacts/my-ns/my-wf/my-node/%s", tt.artifactName)) - w := &testhttp.TestResponseWriter{} - s.GetInputArtifact(w, r) - if assert.Equal(t, 200, w.StatusCode) { - assert.Equal(t, fmt.Sprintf(`filename="%s"`, tt.fileName), w.Header().Get("Content-Disposition")) - assert.Equal(t, "my-data", w.Output) + r.URL = mustParse(fmt.Sprintf("/input-artifacts/my-ns/my-wf/my-node-1/%s", tt.artifactName)) + recorder := httptest.NewRecorder() + s.GetInputArtifact(recorder, r) + if assert.Equal(t, 200, recorder.Result().StatusCode) { + assert.Equal(t, fmt.Sprintf(`filename="%s"`, tt.fileName), recorder.Result().Header.Get("Content-Disposition")) + all, err := io.ReadAll(recorder.Result().Body) + if err != nil { + panic(fmt.Sprintf("failed to read http body: %v", err)) + } + assert.Equal(t, "my-data", string(all)) } }) } @@ -217,7 +568,7 @@ func TestArtifactServer_NodeWithoutArtifact(t *testing.T) { func TestArtifactServer_GetOutputArtifactWithoutInstanceID(t *testing.T) { s := newServer() r := &http.Request{} - r.URL = mustParse("/artifacts/my-ns/your-wf/my-node/my-artifact") + r.URL = mustParse("/artifacts/my-ns/your-wf/my-node-1/my-artifact") w := &testhttp.TestResponseWriter{} s.GetOutputArtifact(w, r) assert.NotEqual(t, 200, w.StatusCode) @@ -226,7 +577,7 @@ func TestArtifactServer_GetOutputArtifactWithoutInstanceID(t *testing.T) { func TestArtifactServer_GetOutputArtifactByUID(t *testing.T) { s := newServer() r := &http.Request{} - r.URL = mustParse("/artifacts/my-uuid/my-node/my-artifact") + r.URL = mustParse("/artifacts/my-uuid/my-node-1/my-artifact") w := &testhttp.TestResponseWriter{} 
s.GetOutputArtifactByUID(w, r) assert.Equal(t, 401, w.StatusCode) @@ -236,15 +587,49 @@ func TestArtifactServer_GetArtifactByUIDInvalidRequestPath(t *testing.T) { s := newServer() r := &http.Request{} // missing my-artifact part to have a valid URL - r.URL = mustParse("/input-artifacts/my-uuid/my-node") + r.URL = mustParse("/input-artifacts/my-uuid/my-node-1") w := &testhttp.TestResponseWriter{} s.GetInputArtifactByUID(w, r) // make sure there is no index out of bounds error - assert.Equal(t, 500, w.StatusCode) - assert.Equal(t, "request path is not valid", w.Output) + assert.Equal(t, 400, w.StatusCode) + assert.Contains(t, w.Output, "Bad Request") w = &testhttp.TestResponseWriter{} s.GetOutputArtifactByUID(w, r) - assert.Equal(t, 500, w.StatusCode) - assert.Equal(t, "request path is not valid", w.Output) + assert.Equal(t, 400, w.StatusCode) + assert.Contains(t, w.Output, "Bad Request") +} + +func TestArtifactServer_httpBadRequestError(t *testing.T) { + s := newServer() + w := &testhttp.TestResponseWriter{} + s.httpBadRequestError(w) + + assert.Equal(t, http.StatusBadRequest, w.StatusCode) + assert.Contains(t, w.Output, "Bad Request") +} + +func TestArtifactServer_httpFromError(t *testing.T) { + s := newServer() + w := &testhttp.TestResponseWriter{} + err := errors.New("math: square root of negative number") + + s.httpFromError(err, w) + + assert.Equal(t, http.StatusInternalServerError, w.StatusCode) + assert.Equal(t, "Internal Server Error\n", w.Output) + + w = &testhttp.TestResponseWriter{} + err = apierr.NewUnauthorized("") + + s.httpFromError(err, w) + + assert.Equal(t, http.StatusUnauthorized, w.StatusCode) + assert.Contains(t, w.Output, "Unauthorized") + + w = &testhttp.TestResponseWriter{} + err = argoerrors.New(argoerrors.CodeNotFound, "not found") + + s.httpFromError(err, w) + assert.Equal(t, http.StatusNotFound, w.StatusCode) } diff --git a/server/auth/gatekeeper.go b/server/auth/gatekeeper.go index ce936c0922b2..588ae4fef78c 100644 --- 
a/server/auth/gatekeeper.go +++ b/server/auth/gatekeeper.go @@ -207,7 +207,7 @@ func (s gatekeeper) getClients(ctx context.Context, req interface{}) (*servertyp return nil, nil, status.Error(codes.Unauthenticated, err.Error()) } if s.ssoIf.IsRBACEnabled() { - clients, err := s.rbacAuthorization(claims, req) + clients, err := s.rbacAuthorization(ctx, claims, req) if err != nil { log.WithError(err).Error("failed to perform RBAC authorization") return nil, nil, status.Error(codes.PermissionDenied, "not allowed") @@ -279,8 +279,8 @@ func (s *gatekeeper) canDelegateRBACToRequestNamespace(req interface{}) bool { return len(namespace) != 0 && s.ssoNamespace != namespace } -func (s *gatekeeper) getClientsForServiceAccount(claims *types.Claims, serviceAccount *corev1.ServiceAccount) (*servertypes.Clients, error) { - authorization, err := s.authorizationForServiceAccount(serviceAccount) +func (s *gatekeeper) getClientsForServiceAccount(ctx context.Context, claims *types.Claims, serviceAccount *corev1.ServiceAccount) (*servertypes.Clients, error) { + authorization, err := s.authorizationForServiceAccount(ctx, serviceAccount) if err != nil { return nil, err } @@ -289,10 +289,11 @@ func (s *gatekeeper) getClientsForServiceAccount(claims *types.Claims, serviceAc return nil, err } claims.ServiceAccountName = serviceAccount.Name + claims.ServiceAccountNamespace = serviceAccount.Namespace return clients, nil } -func (s *gatekeeper) rbacAuthorization(claims *types.Claims, req interface{}) (*servertypes.Clients, error) { +func (s *gatekeeper) rbacAuthorization(ctx context.Context, claims *types.Claims, req interface{}) (*servertypes.Clients, error) { ssoDelegationAllowed, ssoDelegated := false, false loginAccount, err := s.getServiceAccount(claims, s.ssoNamespace) if err != nil { @@ -311,14 +312,14 @@ func (s *gatekeeper) rbacAuthorization(claims *types.Claims, req interface{}) (* } // important! write an audit entry (i.e. 
log entry) so we know which user performed an operation log.WithFields(log.Fields{"serviceAccount": delegatedAccount.Name, "loginServiceAccount": loginAccount.Name, "subject": claims.Subject, "email": claims.Email, "ssoDelegationAllowed": ssoDelegationAllowed, "ssoDelegated": ssoDelegated}).Info("selected SSO RBAC service account for user") - return s.getClientsForServiceAccount(claims, delegatedAccount) + return s.getClientsForServiceAccount(ctx, claims, delegatedAccount) } -func (s *gatekeeper) authorizationForServiceAccount(serviceAccount *corev1.ServiceAccount) (string, error) { +func (s *gatekeeper) authorizationForServiceAccount(ctx context.Context, serviceAccount *corev1.ServiceAccount) (string, error) { if len(serviceAccount.Secrets) == 0 { return "", fmt.Errorf("expected at least one secret for SSO RBAC service account: %s", serviceAccount.GetName()) } - secret, err := s.cache.SecretLister.Secrets(serviceAccount.GetNamespace()).Get(serviceAccount.Secrets[0].Name) + secret, err := s.cache.GetSecret(ctx, serviceAccount.GetNamespace(), serviceAccount.Secrets[0].Name) if err != nil { return "", fmt.Errorf("failed to get service account secret: %w", err) } diff --git a/server/auth/gatekeeper_test.go b/server/auth/gatekeeper_test.go index 2dce0e36d9b1..b4c620a0392b 100644 --- a/server/auth/gatekeeper_test.go +++ b/server/auth/gatekeeper_test.go @@ -105,7 +105,8 @@ func TestServer_GetWFClient(t *testing.T) { }, }, ) - resourceCache := cache.NewResourceCache(kubeClient, context.TODO(), corev1.NamespaceAll) + resourceCache := cache.NewResourceCache(kubeClient, corev1.NamespaceAll) + resourceCache.Run(context.TODO().Done()) var clientForAuthorization ClientForAuthorization = func(authorization string) (*rest.Config, *servertypes.Clients, error) { return &rest.Config{}, &servertypes.Clients{Workflow: &fakewfclientset.Clientset{}, Kubernetes: &kubefake.Clientset{}}, nil } @@ -177,15 +178,17 @@ func TestServer_GetWFClient(t *testing.T) { if assert.NoError(t, err) { 
assert.NotEqual(t, clients, GetWfClient(ctx)) assert.NotEqual(t, kubeClient, GetKubeClient(ctx)) - if assert.NotNil(t, GetClaims(ctx)) { - assert.Equal(t, []string{"my-group", "other-group"}, GetClaims(ctx).Groups) - assert.Equal(t, "my-sa", GetClaims(ctx).ServiceAccountName) + claims := GetClaims(ctx) + if assert.NotNil(t, claims) { + assert.Equal(t, []string{"my-group", "other-group"}, claims.Groups) + assert.Equal(t, "my-sa", claims.ServiceAccountName) + assert.Equal(t, "my-ns", claims.ServiceAccountNamespace) } assert.Equal(t, "my-sa", hook.LastEntry().Data["serviceAccount"]) } } }) - t.Run("SSO+RBAC, Namespace delegation ON, precedence=2, Delagated", func(t *testing.T) { + t.Run("SSO+RBAC, Namespace delegation ON, precedence=2, Delegated", func(t *testing.T) { os.Setenv("SSO_DELEGATE_RBAC_TO_NAMESPACE", "true") ssoIf := &ssomocks.Interface{} ssoIf.On("Authorize", mock.Anything, mock.Anything).Return(&types.Claims{Groups: []string{"my-group", "other-group"}}, nil) @@ -196,9 +199,11 @@ func TestServer_GetWFClient(t *testing.T) { if assert.NoError(t, err) { assert.NotEqual(t, clients, GetWfClient(ctx)) assert.NotEqual(t, kubeClient, GetKubeClient(ctx)) - if assert.NotNil(t, GetClaims(ctx)) { - assert.Equal(t, []string{"my-group", "other-group"}, GetClaims(ctx).Groups) - assert.Equal(t, "user1-sa", GetClaims(ctx).ServiceAccountName) + claims := GetClaims(ctx) + if assert.NotNil(t, claims) { + assert.Equal(t, []string{"my-group", "other-group"}, claims.Groups) + assert.Equal(t, "user1-sa", claims.ServiceAccountName) + assert.Equal(t, "user1-ns", claims.ServiceAccountNamespace) } assert.Equal(t, "user1-sa", hook.LastEntry().Data["serviceAccount"]) } @@ -215,9 +220,11 @@ func TestServer_GetWFClient(t *testing.T) { if assert.NoError(t, err) { assert.NotEqual(t, clients, GetWfClient(ctx)) assert.NotEqual(t, kubeClient, GetKubeClient(ctx)) - if assert.NotNil(t, GetClaims(ctx)) { - assert.Equal(t, []string{"my-group", "other-group"}, GetClaims(ctx).Groups) - 
assert.Equal(t, "my-sa", GetClaims(ctx).ServiceAccountName) + claims := GetClaims(ctx) + if assert.NotNil(t, claims) { + assert.Equal(t, []string{"my-group", "other-group"}, claims.Groups) + assert.Equal(t, "my-sa", claims.ServiceAccountName) + assert.Equal(t, "my-ns", claims.ServiceAccountNamespace) } assert.Equal(t, "my-sa", hook.LastEntry().Data["serviceAccount"]) } @@ -234,9 +241,11 @@ func TestServer_GetWFClient(t *testing.T) { if assert.NoError(t, err) { assert.NotEqual(t, clients, GetWfClient(ctx)) assert.NotEqual(t, kubeClient, GetKubeClient(ctx)) - if assert.NotNil(t, GetClaims(ctx)) { - assert.Equal(t, []string{"my-group", "other-group"}, GetClaims(ctx).Groups) - assert.Equal(t, "my-sa", GetClaims(ctx).ServiceAccountName) + claims := GetClaims(ctx) + if assert.NotNil(t, claims) { + assert.Equal(t, []string{"my-group", "other-group"}, claims.Groups) + assert.Equal(t, "my-sa", claims.ServiceAccountName) + assert.Equal(t, "my-ns", claims.ServiceAccountNamespace) } assert.Equal(t, "my-sa", hook.LastEntry().Data["serviceAccount"]) } @@ -254,9 +263,11 @@ func TestServer_GetWFClient(t *testing.T) { if assert.NoError(t, err) { assert.NotEqual(t, clients, GetWfClient(ctx)) assert.NotEqual(t, kubeClient, GetKubeClient(ctx)) - if assert.NotNil(t, GetClaims(ctx)) { - assert.Equal(t, []string{"my-group", "other-group"}, GetClaims(ctx).Groups) - assert.Equal(t, "my-sa", GetClaims(ctx).ServiceAccountName) + claims := GetClaims(ctx) + if assert.NotNil(t, claims) { + assert.Equal(t, []string{"my-group", "other-group"}, claims.Groups) + assert.Equal(t, "my-sa", claims.ServiceAccountName) + assert.Equal(t, "my-ns", claims.ServiceAccountNamespace) } assert.Equal(t, "my-sa", hook.LastEntry().Data["serviceAccount"]) } diff --git a/server/auth/mocks/Gatekeeper.go b/server/auth/mocks/Gatekeeper.go index 18d8851178c5..f4a570579ab3 100644 --- a/server/auth/mocks/Gatekeeper.go +++ b/server/auth/mocks/Gatekeeper.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.9.4. DO NOT EDIT. 
+// Code generated by mockery v2.10.0. DO NOT EDIT. package mocks diff --git a/server/auth/rbac/config.go b/server/auth/rbac/config.go deleted file mode 100644 index 51b7534f5104..000000000000 --- a/server/auth/rbac/config.go +++ /dev/null @@ -1,9 +0,0 @@ -package rbac - -type Config struct { - Enabled bool `json:"enabled,omitempty"` -} - -func (c *Config) IsEnabled() bool { - return c != nil && c.Enabled -} diff --git a/server/auth/sso/mocks/Interface.go b/server/auth/sso/mocks/Interface.go index bb94157d6521..a2ab5d3eb9c1 100644 --- a/server/auth/sso/mocks/Interface.go +++ b/server/auth/sso/mocks/Interface.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.9.4. DO NOT EDIT. +// Code generated by mockery v2.10.0. DO NOT EDIT. package mocks diff --git a/server/auth/sso/sso.go b/server/auth/sso/sso.go index e49b8a52ece8..ea6acc5e3233 100644 --- a/server/auth/sso/sso.go +++ b/server/auth/sso/sso.go @@ -12,6 +12,8 @@ import ( "strings" "time" + "github.com/argoproj/argo-workflows/v3/config" + pkgrand "github.com/argoproj/pkg/rand" "github.com/coreos/go-oidc/v3/oidc" "github.com/go-jose/go-jose/v3" @@ -23,7 +25,6 @@ import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" corev1 "k8s.io/client-go/kubernetes/typed/core/v1" - "github.com/argoproj/argo-workflows/v3/server/auth/rbac" "github.com/argoproj/argo-workflows/v3/server/auth/types" ) @@ -45,6 +46,8 @@ type Interface interface { var _ Interface = &sso{} +type Config = config.SSOConfig + type sso struct { config *oauth2.Config issuer string @@ -54,7 +57,7 @@ type sso struct { secure bool privateKey crypto.PrivateKey encrypter jose.Encrypter - rbacConfig *rbac.Config + rbacConfig *config.RBACConfig expiry time.Duration customClaimName string userInfoPath string @@ -64,29 +67,6 @@ func (s *sso) IsRBACEnabled() bool { return s.rbacConfig.IsEnabled() } -type Config struct { - Issuer string `json:"issuer"` - IssuerAlias string `json:"issuerAlias,omitempty"` - ClientID apiv1.SecretKeySelector `json:"clientId"` - 
ClientSecret apiv1.SecretKeySelector `json:"clientSecret"` - RedirectURL string `json:"redirectUrl"` - RBAC *rbac.Config `json:"rbac,omitempty"` - // additional scopes (on top of "openid") - Scopes []string `json:"scopes,omitempty"` - SessionExpiry metav1.Duration `json:"sessionExpiry,omitempty"` - // customGroupClaimName will override the groups claim name - CustomGroupClaimName string `json:"customGroupClaimName,omitempty"` - UserInfoPath string `json:"userInfoPath,omitempty"` - InsecureSkipVerify bool `json:"insecureSkipVerify,omitempty"` -} - -func (c Config) GetSessionExpiry() time.Duration { - if c.SessionExpiry.Duration > 0 { - return c.SessionExpiry.Duration - } - return 10 * time.Hour -} - // Abstract methods of oidc.Provider that our code uses into an interface. That // will allow us to implement a stub for unit testing. If you start using more // oidc.Provider methods in this file, add them here and provide a stub @@ -243,7 +223,6 @@ func (s *sso) HandleCallback(w http.ResponseWriter, r *http.Request) { http.SetCookie(w, &http.Cookie{Name: state, MaxAge: 0}) if err != nil { w.WriteHeader(400) - _, _ = w.Write([]byte(fmt.Sprintf("invalid state: %v", err))) return } redirectOption := oauth2.SetAuthURLParam("redirect_uri", s.getRedirectUrl(r)) @@ -252,25 +231,21 @@ func (s *sso) HandleCallback(w http.ResponseWriter, r *http.Request) { oauth2Token, err := s.config.Exchange(oauth2Context, r.URL.Query().Get("code"), redirectOption) if err != nil { w.WriteHeader(401) - _, _ = w.Write([]byte(fmt.Sprintf("failed to exchange token: %v", err))) return } rawIDToken, ok := oauth2Token.Extra("id_token").(string) if !ok { w.WriteHeader(401) - _, _ = w.Write([]byte("failed to get id_token")) return } idToken, err := s.idTokenVerifier.Verify(ctx, rawIDToken) if err != nil { w.WriteHeader(401) - _, _ = w.Write([]byte(fmt.Sprintf("failed to verify token: %v", err))) return } c := &types.Claims{} if err := idToken.Claims(c); err != nil { w.WriteHeader(401) - _, _ = 
w.Write([]byte(fmt.Sprintf("failed to get claims: %v", err))) return } @@ -281,7 +256,6 @@ func (s *sso) HandleCallback(w http.ResponseWriter, r *http.Request) { groups, err = c.GetCustomGroup(s.customClaimName) if err != nil { w.WriteHeader(401) - _, _ = w.Write([]byte(fmt.Sprintf("failed to get custom claim: %v", err))) return } } @@ -292,7 +266,6 @@ func (s *sso) HandleCallback(w http.ResponseWriter, r *http.Request) { groups, err = c.GetUserInfoGroups(oauth2Token.AccessToken, s.issuer, s.userInfoPath) if err != nil { w.WriteHeader(401) - _, _ = w.Write([]byte(fmt.Sprintf("failed to get groups claim: %v", err))) return } } @@ -303,18 +276,18 @@ func (s *sso) HandleCallback(w http.ResponseWriter, r *http.Request) { Subject: c.Subject, Expiry: jwt.NewNumericDate(time.Now().Add(s.expiry)), }, - Groups: groups, - RawClaim: c.RawClaim, - Email: c.Email, - EmailVerified: c.EmailVerified, - ServiceAccountName: c.ServiceAccountName, - PreferredUsername: c.PreferredUsername, + Groups: groups, + RawClaim: c.RawClaim, + Email: c.Email, + EmailVerified: c.EmailVerified, + ServiceAccountName: c.ServiceAccountName, + PreferredUsername: c.PreferredUsername, + ServiceAccountNamespace: c.ServiceAccountNamespace, } raw, err := jwt.Encrypted(s.encrypter).Claims(argoClaims).CompactSerialize() if err != nil { w.WriteHeader(401) - _, _ = w.Write([]byte(fmt.Sprintf("failed to encode claims: %v", err))) return } value := Prefix + raw diff --git a/server/auth/types/claims.go b/server/auth/types/claims.go index 52f1ec900622..f1f2a60d2f0a 100644 --- a/server/auth/types/claims.go +++ b/server/auth/types/claims.go @@ -10,12 +10,13 @@ import ( type Claims struct { jwt.Claims - Groups []string `json:"groups,omitempty"` - Email string `json:"email,omitempty"` - EmailVerified bool `json:"email_verified,omitempty"` - ServiceAccountName string `json:"service_account_name,omitempty"` - PreferredUsername string `json:"preferred_username,omitempty"` - RawClaim map[string]interface{} `json:"-"` + 
Groups []string `json:"groups,omitempty"` + Email string `json:"email,omitempty"` + EmailVerified bool `json:"email_verified,omitempty"` + ServiceAccountName string `json:"service_account_name,omitempty"` + ServiceAccountNamespace string `json:"service_account_namespace,omitempty"` + PreferredUsername string `json:"preferred_username,omitempty"` + RawClaim map[string]interface{} `json:"-"` } type UserInfo struct { diff --git a/server/cache/cache.go b/server/cache/cache.go index cd5540efb03f..4dcf99335073 100644 --- a/server/cache/cache.go +++ b/server/cache/cache.go @@ -1,26 +1,6 @@ package cache -import ( - "context" - "time" - - "k8s.io/client-go/informers" - "k8s.io/client-go/kubernetes" - v1 "k8s.io/client-go/listers/core/v1" -) - -type ResourceCache struct { - v1.ServiceAccountLister - v1.SecretLister -} - -func NewResourceCache(client kubernetes.Interface, ctx context.Context, namespace string) *ResourceCache { - informerFactory := informers.NewSharedInformerFactoryWithOptions(client, time.Minute*20, informers.WithNamespace(namespace)) - cache := &ResourceCache{ - ServiceAccountLister: informerFactory.Core().V1().ServiceAccounts().Lister(), - SecretLister: informerFactory.Core().V1().Secrets().Lister(), - } - informerFactory.Start(ctx.Done()) - informerFactory.WaitForCacheSync(ctx.Done()) - return cache +type Interface interface { + Get(key string) (any, bool) + Add(key string, value any) } diff --git a/server/cache/lru_ttl_cache.go b/server/cache/lru_ttl_cache.go new file mode 100644 index 000000000000..381f75ff9bdd --- /dev/null +++ b/server/cache/lru_ttl_cache.go @@ -0,0 +1,44 @@ +package cache + +import ( + "time" + + "k8s.io/utils/lru" +) + +var currentTime = time.Now + +type lruTtlCache struct { + timeout time.Duration + cache *lru.Cache +} + +type item struct { + expiryTime time.Time + value any +} + +func NewLRUTtlCache(timeout time.Duration, size int) *lruTtlCache { + return &lruTtlCache{ + timeout: timeout, + cache: lru.New(size), + } +} + +func (c 
*lruTtlCache) Get(key string) (any, bool) { + if data, ok := c.cache.Get(key); ok { + item := data.(*item) + if currentTime().Before(item.expiryTime) { + return item.value, true + } + c.cache.Remove(key) + } + return nil, false +} + +func (c *lruTtlCache) Add(key string, value any) { + c.cache.Add(key, &item{ + expiryTime: currentTime().Add(c.timeout), + value: value, + }) +} diff --git a/server/cache/lru_ttl_cache_test.go b/server/cache/lru_ttl_cache_test.go new file mode 100644 index 000000000000..1600645589e3 --- /dev/null +++ b/server/cache/lru_ttl_cache_test.go @@ -0,0 +1,64 @@ +package cache + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestNewTimedCache(t *testing.T) { + + t.Run("NewLRUTtlCache should return a new instance", func(t *testing.T) { + cache := NewLRUTtlCache(time.Second, 1) + assert.NotNil(t, cache) + }) + + t.Run("TimedCache should cache based on LRU size", func(t *testing.T) { + cache := NewLRUTtlCache(time.Second*10, 2) + cache.Add("one", "one") + cache.Add("two", "two") + + // Both "one" and "two" should be available since maxSize is 2 + _, ok := cache.Get("one") + assert.True(t, ok) + + _, ok = cache.Get("two") + assert.True(t, ok) + + // "three" should be available since its newly added + cache.Add("three", "three") + _, ok = cache.Get("three") + assert.True(t, ok) + + // "one" should not be available since maxSize is 2 + _, ok = cache.Get("one") + assert.False(t, ok) + }) + + t.Run("TimedCache should cache based on timeout", func(t *testing.T) { + tempCurrentTime := currentTime + + cache := NewLRUTtlCache(time.Minute*1, 2) + + currentTime = getTimeFunc(0, 0) + cache.Add("one", "one") + + currentTime = getTimeFunc(0, 30) + _, ok := cache.Get("one") + assert.True(t, ok) + + currentTime = getTimeFunc(1, 30) + // "one" should not be available since timeout is 1 min + _, ok = cache.Get("one") + assert.False(t, ok) + currentTime = tempCurrentTime + }) + +} + +func getTimeFunc(min int, sec int) func() 
time.Time { + return func() time.Time { + return time.Date(0, 0, 0, 0, min, sec, 0, time.UTC) + } +} diff --git a/server/cache/resource_cache.go b/server/cache/resource_cache.go new file mode 100644 index 000000000000..c9b4e7126f35 --- /dev/null +++ b/server/cache/resource_cache.go @@ -0,0 +1,65 @@ +package cache + +import ( + "context" + "time" + + corev1 "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + + "k8s.io/client-go/informers" + "k8s.io/client-go/kubernetes" + v1 "k8s.io/client-go/listers/core/v1" +) + +type ResourceCache struct { + cache Interface + client kubernetes.Interface + v1.ServiceAccountLister + informerFactory informers.SharedInformerFactory +} + +func NewResourceCacheWithTimeout(client kubernetes.Interface, namespace string, timeout time.Duration) *ResourceCache { + informerFactory := informers.NewSharedInformerFactoryWithOptions(client, time.Minute*20, informers.WithNamespace(namespace)) + cache := &ResourceCache{ + cache: NewLRUTtlCache(timeout, 2000), + client: client, + ServiceAccountLister: informerFactory.Core().V1().ServiceAccounts().Lister(), + informerFactory: informerFactory, + } + return cache +} + +func NewResourceCache(client kubernetes.Interface, namespace string) *ResourceCache { + return NewResourceCacheWithTimeout(client, namespace, time.Minute*1) +} + +func (c *ResourceCache) Run(stopCh <-chan struct{}) { + c.informerFactory.Start(stopCh) + c.informerFactory.WaitForCacheSync(stopCh) +} + +func (c *ResourceCache) GetSecret(ctx context.Context, namespace string, secretName string) (*corev1.Secret, error) { + cacheKey := c.getSecretCacheKey(namespace, secretName) + if secret, ok := c.cache.Get(cacheKey); ok { + if secret, ok := secret.(*corev1.Secret); ok { + return secret, nil + } + } + + secret, err := c.getSecretFromServer(ctx, namespace, secretName) + if err != nil { + return nil, err + } + + c.cache.Add(cacheKey, secret) + return secret, nil +} + +func (c *ResourceCache) getSecretFromServer(ctx 
context.Context, namespace string, secretName string) (*corev1.Secret, error) { + return c.client.CoreV1().Secrets(namespace).Get(ctx, secretName, metav1.GetOptions{}) +} + +func (c *ResourceCache) getSecretCacheKey(namespace string, secretName string) string { + return namespace + ":secret:" + secretName +} diff --git a/server/cache/cache_test.go b/server/cache/resource_cache_test.go similarity index 93% rename from server/cache/cache_test.go rename to server/cache/resource_cache_test.go index 42407d5391f5..ae93ce51c9a4 100644 --- a/server/cache/cache_test.go +++ b/server/cache/resource_cache_test.go @@ -75,7 +75,9 @@ func TestServer_K8sUtilsCache(t *testing.T) { "token": {}, }, }) - cache := NewResourceCache(kubeClient, context.TODO(), v1.NamespaceAll) + cache := NewResourceCache(kubeClient, v1.NamespaceAll) + ctx := context.TODO() + cache.Run(ctx.Done()) t.Run("List Service Accounts in different namespaces", func(t *testing.T) { sa, _ := cache.ServiceAccountLister.ServiceAccounts("ns1").List(labels.Everything()) @@ -87,7 +89,7 @@ func TestServer_K8sUtilsCache(t *testing.T) { assert.Equal(t, 1, len(sa)) assert.True(t, checkServiceAccountExists(sa, "sa3")) - secrets, _ := cache.SecretLister.Secrets("ns1").List(labels.Everything()) - assert.Equal(t, 1, len(secrets)) + secret, _ := cache.GetSecret(ctx, "ns1", "s1") + assert.NotNil(t, secret) }) } diff --git a/server/clusterworkflowtemplate/cluster_workflow_template_server.go b/server/clusterworkflowtemplate/cluster_workflow_template_server.go index 3966361e1fc4..cdf38dec8ff7 100644 --- a/server/clusterworkflowtemplate/cluster_workflow_template_server.go +++ b/server/clusterworkflowtemplate/cluster_workflow_template_server.go @@ -32,7 +32,7 @@ func (cwts *ClusterWorkflowTemplateServer) CreateClusterWorkflowTemplate(ctx con cwts.instanceIDService.Label(req.Template) creator.Label(ctx, req.Template) cwftmplGetter := 
templateresolution.WrapClusterWorkflowTemplateInterface(wfClient.ArgoprojV1alpha1().ClusterWorkflowTemplates()) - _, err := validate.ValidateClusterWorkflowTemplate(nil, cwftmplGetter, req.Template, validate.ValidateOpts{}) + err := validate.ValidateClusterWorkflowTemplate(nil, cwftmplGetter, req.Template, validate.ValidateOpts{}) if err != nil { return nil, err } @@ -97,7 +97,7 @@ func (cwts *ClusterWorkflowTemplateServer) LintClusterWorkflowTemplate(ctx conte wfClient := auth.GetWfClient(ctx) cwftmplGetter := templateresolution.WrapClusterWorkflowTemplateInterface(wfClient.ArgoprojV1alpha1().ClusterWorkflowTemplates()) - _, err := validate.ValidateClusterWorkflowTemplate(nil, cwftmplGetter, req.Template, validate.ValidateOpts{Lint: true}) + err := validate.ValidateClusterWorkflowTemplate(nil, cwftmplGetter, req.Template, validate.ValidateOpts{Lint: true}) if err != nil { return nil, err } @@ -116,7 +116,7 @@ func (cwts *ClusterWorkflowTemplateServer) UpdateClusterWorkflowTemplate(ctx con wfClient := auth.GetWfClient(ctx) cwftmplGetter := templateresolution.WrapClusterWorkflowTemplateInterface(wfClient.ArgoprojV1alpha1().ClusterWorkflowTemplates()) - _, err = validate.ValidateClusterWorkflowTemplate(nil, cwftmplGetter, req.Template, validate.ValidateOpts{}) + err = validate.ValidateClusterWorkflowTemplate(nil, cwftmplGetter, req.Template, validate.ValidateOpts{}) if err != nil { return nil, err } diff --git a/server/eventsource/event_source_server.go b/server/eventsource/event_source_server.go index 9f869229ae8d..303f82bac4a9 100644 --- a/server/eventsource/event_source_server.go +++ b/server/eventsource/event_source_server.go @@ -69,8 +69,10 @@ func (e *eventSourceServer) EventSourcesLogs(in *eventsourcepkg.EventSourcesLogs if in.Name != "" { labelSelector += "=" + in.Name } + ctx := svr.Context() return logs.LogPods( - svr.Context(), + ctx, + auth.GetKubeClient(ctx), in.Namespace, labelSelector, in.Grep, diff --git a/server/info/info_server.go 
b/server/info/info_server.go index 66c9671a5172..3f14e819d29b 100644 --- a/server/info/info_server.go +++ b/server/info/info_server.go @@ -4,6 +4,8 @@ import ( "context" "os" + log "github.com/sirupsen/logrus" + "github.com/argoproj/argo-workflows/v3" infopkg "github.com/argoproj/argo-workflows/v3/pkg/apiclient/info" wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" @@ -20,12 +22,13 @@ func (i *infoServer) GetUserInfo(ctx context.Context, _ *infopkg.GetUserInfoRequ claims := auth.GetClaims(ctx) if claims != nil { return &infopkg.GetUserInfoResponse{ - Subject: claims.Subject, - Issuer: claims.Issuer, - Groups: claims.Groups, - Email: claims.Email, - EmailVerified: claims.EmailVerified, - ServiceAccountName: claims.ServiceAccountName, + Subject: claims.Subject, + Issuer: claims.Issuer, + Groups: claims.Groups, + Email: claims.Email, + EmailVerified: claims.EmailVerified, + ServiceAccountName: claims.ServiceAccountName, + ServiceAccountNamespace: claims.ServiceAccountNamespace, }, nil } return &infopkg.GetUserInfoResponse{}, nil @@ -50,6 +53,22 @@ func (i *infoServer) GetVersion(context.Context, *infopkg.GetVersionRequest) (*w return &version, nil } +func (i *infoServer) CollectEvent(ctx context.Context, req *infopkg.CollectEventRequest) (*infopkg.CollectEventResponse, error) { + logFields := log.Fields{} + + claims := auth.GetClaims(ctx) + if claims != nil { + logFields["subject"] = claims.Subject + logFields["email"] = claims.Email + } + + logFields["name"] = req.Name + + log.WithFields(logFields).Info("tracking UI usage️️") + + return &infopkg.CollectEventResponse{}, nil +} + func NewInfoServer(managedNamespace string, links []*wfv1.Link, navColor string) infopkg.InfoServiceServer { return &infoServer{managedNamespace, links, navColor} } diff --git a/server/pipeline/pipeline_server.go b/server/pipeline/pipeline_server.go deleted file mode 100644 index fae785fc0107..000000000000 --- a/server/pipeline/pipeline_server.go +++ /dev/null @@ -1,179 
+0,0 @@ -package pipeline - -import ( - "context" - "io" - - dfv1 "github.com/argoproj-labs/argo-dataflow/api/v1alpha1" - corev1 "k8s.io/api/core/v1" - apierr "k8s.io/apimachinery/pkg/api/errors" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" - "k8s.io/apimachinery/pkg/runtime" - - pipelinepkg "github.com/argoproj/argo-workflows/v3/pkg/apiclient/pipeline" - "github.com/argoproj/argo-workflows/v3/server/auth" - "github.com/argoproj/argo-workflows/v3/util/logs" -) - -type server struct{} - -func (s *server) ListPipelines(ctx context.Context, req *pipelinepkg.ListPipelinesRequest) (*dfv1.PipelineList, error) { - client := auth.GetDynamicClient(ctx) - opts := metav1.ListOptions{} - if req.ListOptions != nil { - opts = *req.ListOptions - } - list, err := client.Resource(dfv1.PipelineGroupVersionResource).Namespace(req.Namespace).List(ctx, opts) - if err != nil { - return nil, err - } - items := make([]dfv1.Pipeline, len(list.Items)) - for i, un := range list.Items { - if err := runtime.DefaultUnstructuredConverter.FromUnstructured(un.Object, &items[i]); err != nil { - return nil, err - } - } - return &dfv1.PipelineList{Items: items}, nil -} - -func (s *server) WatchPipelines(req *pipelinepkg.ListPipelinesRequest, svr pipelinepkg.PipelineService_WatchPipelinesServer) error { - ctx := svr.Context() - client := auth.GetDynamicClient(ctx) - opts := metav1.ListOptions{} - if req.ListOptions != nil { - opts = *req.ListOptions - } - watcher, err := client.Resource(dfv1.PipelineGroupVersionResource).Namespace(req.Namespace).Watch(ctx, opts) - if err != nil { - return err - } - defer watcher.Stop() - for { - select { - case <-ctx.Done(): - return ctx.Err() - case event, open := <-watcher.ResultChan(): - if !open { - return io.EOF - } - un, ok := event.Object.(*unstructured.Unstructured) - if !ok { - return apierr.FromObject(event.Object) - } - pl := &dfv1.Pipeline{} - if err := 
runtime.DefaultUnstructuredConverter.FromUnstructured(un.Object, pl); err != nil { - return err - } - if err := svr.Send(&pipelinepkg.PipelineWatchEvent{Type: string(event.Type), Object: pl}); err != nil { - return err - } - } - } -} - -func (s *server) GetPipeline(ctx context.Context, req *pipelinepkg.GetPipelineRequest) (*dfv1.Pipeline, error) { - client := auth.GetDynamicClient(ctx) - opts := metav1.GetOptions{} - if req.GetOptions != nil { - opts = *req.GetOptions - } - un, err := client.Resource(dfv1.PipelineGroupVersionResource).Namespace(req.Namespace).Get(ctx, req.Name, opts) - if err != nil { - return nil, err - } - item := &dfv1.Pipeline{} - return item, runtime.DefaultUnstructuredConverter.FromUnstructured(un.Object, item) -} - -func (s *server) RestartPipeline(ctx context.Context, req *pipelinepkg.RestartPipelineRequest) (*pipelinepkg.RestartPipelineResponse, error) { - client := auth.GetKubeClient(ctx) - err := client.CoreV1().Pods(req.Namespace).DeleteCollection( - ctx, - metav1.DeleteOptions{}, - metav1.ListOptions{LabelSelector: dfv1.KeyPipelineName + "=" + req.Name}, - ) - if err != nil { - return nil, err - } - return &pipelinepkg.RestartPipelineResponse{}, nil -} - -func (s *server) DeletePipeline(ctx context.Context, req *pipelinepkg.DeletePipelineRequest) (*pipelinepkg.DeletePipelineResponse, error) { - client := auth.GetDynamicClient(ctx) - opts := metav1.DeleteOptions{} - if req.DeleteOptions != nil { - opts = *req.DeleteOptions - } - err := client.Resource(dfv1.PipelineGroupVersionResource).Namespace(req.Namespace).Delete(ctx, req.Name, opts) - if err != nil { - return nil, err - } - return &pipelinepkg.DeletePipelineResponse{}, nil -} - -func (s *server) PipelineLogs(in *pipelinepkg.PipelineLogsRequest, svr pipelinepkg.PipelineService_PipelineLogsServer) error { - labelSelector := dfv1.KeyPipelineName - if in.Name != "" { - labelSelector += "=" + in.Name - } - if in.StepName != "" { - labelSelector += "," + dfv1.KeyStepName + "=" + 
in.StepName - } - return logs.LogPods( - svr.Context(), - in.Namespace, - labelSelector, - in.Grep, - in.PodLogOptions, - func(pod *corev1.Pod, data []byte) error { - now := metav1.Now() - return svr.Send(&pipelinepkg.LogEntry{ - Namespace: pod.Namespace, - PipelineName: pod.Labels[dfv1.KeyPipelineName], - StepName: pod.Labels[dfv1.KeyStepName], - Time: &now, - Msg: string(data), - }) - }, - ) -} - -func (s *server) WatchSteps(req *pipelinepkg.WatchStepRequest, svr pipelinepkg.PipelineService_WatchStepsServer) error { - ctx := svr.Context() - client := auth.GetDynamicClient(ctx) - opts := metav1.ListOptions{} - if req.ListOptions != nil { - opts = *req.ListOptions - } - watcher, err := client.Resource(dfv1.StepGroupVersionResource).Namespace(req.Namespace).Watch(ctx, opts) - if err != nil { - return err - } - defer watcher.Stop() - for { - select { - case <-ctx.Done(): - return ctx.Err() - case event, open := <-watcher.ResultChan(): - if !open { - return io.EOF - } - un, ok := event.Object.(*unstructured.Unstructured) - if !ok { - return apierr.FromObject(event.Object) - } - step := &dfv1.Step{} - if err := runtime.DefaultUnstructuredConverter.FromUnstructured(un.Object, step); err != nil { - return err - } - if err := svr.Send(&pipelinepkg.StepWatchEvent{Type: string(event.Type), Object: step}); err != nil { - return err - } - } - } -} - -func NewPipelineServer() pipelinepkg.PipelineServiceServer { - return &server{} -} diff --git a/server/sensor/sensor_server.go b/server/sensor/sensor_server.go index e91cb2b18758..d8e448d40df8 100644 --- a/server/sensor/sensor_server.go +++ b/server/sensor/sensor_server.go @@ -54,8 +54,10 @@ func (s *sensorServer) SensorsLogs(in *sensorpkg.SensorsLogsRequest, svr sensorp if in.Name != "" { labelSelector += "=" + in.Name } + ctx := svr.Context() return logs.LogPods( - svr.Context(), + ctx, + auth.GetKubeClient(ctx), in.Namespace, labelSelector, in.Grep, diff --git a/server/workflow/workflow_server.go 
b/server/workflow/workflow_server.go index 40f50e2ded7d..ca5ad1e0665b 100644 --- a/server/workflow/workflow_server.go +++ b/server/workflow/workflow_server.go @@ -12,6 +12,7 @@ import ( corev1 "k8s.io/api/core/v1" apierr "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/types" "github.com/argoproj/argo-workflows/v3/errors" "github.com/argoproj/argo-workflows/v3/persist/sqldb" @@ -62,7 +63,7 @@ func (s *workflowServer) CreateWorkflow(ctx context.Context, req *workflowpkg.Wo wftmplGetter := templateresolution.WrapWorkflowTemplateInterface(wfClient.ArgoprojV1alpha1().WorkflowTemplates(req.Namespace)) cwftmplGetter := templateresolution.WrapClusterWorkflowTemplateInterface(wfClient.ArgoprojV1alpha1().ClusterWorkflowTemplates()) - _, err := validate.ValidateWorkflow(wftmplGetter, cwftmplGetter, req.Workflow, validate.ValidateOpts{}) + err := validate.ValidateWorkflow(wftmplGetter, cwftmplGetter, req.Workflow, validate.ValidateOpts{}) if err != nil { return nil, err } @@ -298,6 +299,12 @@ func (s *workflowServer) DeleteWorkflow(ctx context.Context, req *workflowpkg.Wo if err != nil { return nil, err } + if req.Force { + _, err := auth.GetWfClient(ctx).ArgoprojV1alpha1().Workflows(wf.Namespace).Patch(ctx, wf.Name, types.MergePatchType, []byte("{\"metadata\":{\"finalizers\":null}}"), metav1.PatchOptions{}) + if err != nil { + return nil, err + } + } err = auth.GetWfClient(ctx).ArgoprojV1alpha1().Workflows(wf.Namespace).Delete(ctx, wf.Name, metav1.DeleteOptions{PropagationPolicy: argoutil.GetDeletePropagation()}) if err != nil { return nil, err @@ -324,7 +331,7 @@ func (s *workflowServer) RetryWorkflow(ctx context.Context, req *workflowpkg.Wor return nil, err } - wf, podsToDelete, err := util.FormulateRetryWorkflow(ctx, wf, req.RestartSuccessful, req.NodeFieldSelector) + wf, podsToDelete, err := util.FormulateRetryWorkflow(ctx, wf, req.RestartSuccessful, req.NodeFieldSelector, req.Parameters) if err != nil { return 
nil, err } @@ -362,7 +369,7 @@ func (s *workflowServer) ResubmitWorkflow(ctx context.Context, req *workflowpkg. return nil, err } - newWF, err := util.FormulateResubmitWorkflow(wf, req.Memoized) + newWF, err := util.FormulateResubmitWorkflow(wf, req.Memoized, req.Parameters) if err != nil { return nil, err } @@ -528,7 +535,7 @@ func (s *workflowServer) LintWorkflow(ctx context.Context, req *workflowpkg.Work s.instanceIDService.Label(req.Workflow) creator.Label(ctx, req.Workflow) - _, err := validate.ValidateWorkflow(wftmplGetter, cwftmplGetter, req.Workflow, validate.ValidateOpts{Lint: true}) + err := validate.ValidateWorkflow(wftmplGetter, cwftmplGetter, req.Workflow, validate.ValidateOpts{Lint: true}) if err != nil { return nil, err } @@ -628,7 +635,7 @@ func (s *workflowServer) SubmitWorkflow(ctx context.Context, req *workflowpkg.Wo wftmplGetter := templateresolution.WrapWorkflowTemplateInterface(wfClient.ArgoprojV1alpha1().WorkflowTemplates(req.Namespace)) cwftmplGetter := templateresolution.WrapClusterWorkflowTemplateInterface(wfClient.ArgoprojV1alpha1().ClusterWorkflowTemplates()) - _, err = validate.ValidateWorkflow(wftmplGetter, cwftmplGetter, wf, validate.ValidateOpts{Submit: true}) + err = validate.ValidateWorkflow(wftmplGetter, cwftmplGetter, wf, validate.ValidateOpts{Submit: true}) if err != nil { return nil, err } diff --git a/server/workflowarchive/archived_workflow_server.go b/server/workflowarchive/archived_workflow_server.go index 4f8594f1e12c..c5e07f2c712f 100644 --- a/server/workflowarchive/archived_workflow_server.go +++ b/server/workflowarchive/archived_workflow_server.go @@ -56,6 +56,7 @@ func (w *archivedWorkflowServer) ListArchivedWorkflows(ctx context.Context, req name := "" minStartedAt := time.Time{} maxStartedAt := time.Time{} + showRemainingItemCount := false for _, selector := range strings.Split(options.FieldSelector, ",") { if len(selector) == 0 { continue @@ -74,6 +75,11 @@ func (w *archivedWorkflowServer) ListArchivedWorkflows(ctx 
context.Context, req if err != nil { return nil, err } + } else if strings.HasPrefix(selector, "ext.showRemainingItemCount") { + showRemainingItemCount, err = strconv.ParseBool(strings.TrimPrefix(selector, "ext.showRemainingItemCount=")) + if err != nil { + return nil, err + } } else { return nil, fmt.Errorf("unsupported requirement %s", selector) } @@ -109,6 +115,21 @@ func (w *archivedWorkflowServer) ListArchivedWorkflows(ctx context.Context, req meta := metav1.ListMeta{} + if showRemainingItemCount && !loadAll { + total, err := w.wfArchive.CountWorkflows(namespace, name, namePrefix, minStartedAt, maxStartedAt, requirements) + if err != nil { + return nil, err + } + var count = total - int64(offset) - int64(items.Len()) + if len(items) > limit { + count = count + 1 + } + if count < 0 { + count = 0 + } + meta.RemainingItemCount = &count + } + if !loadAll && len(items) > limit { items = items[0:limit] meta.Continue = fmt.Sprintf("%v", offset+limit) @@ -200,7 +221,7 @@ func (w *archivedWorkflowServer) ResubmitArchivedWorkflow(ctx context.Context, r return nil, err } - newWF, err := util.FormulateResubmitWorkflow(wf, req.Memoized) + newWF, err := util.FormulateResubmitWorkflow(wf, req.Memoized, nil) if err != nil { return nil, err } @@ -221,27 +242,35 @@ func (w *archivedWorkflowServer) RetryArchivedWorkflow(ctx context.Context, req return nil, err } - wf, podsToDelete, err := util.FormulateRetryWorkflow(ctx, wf, req.RestartSuccessful, req.NodeFieldSelector) - if err != nil { - return nil, err - } + _, err = wfClient.ArgoprojV1alpha1().Workflows(req.Namespace).Get(ctx, wf.Name, metav1.GetOptions{}) + if apierr.IsNotFound(err) { - for _, podName := range podsToDelete { - log.WithFields(log.Fields{"podDeleted": podName}).Info("Deleting pod") - err := kubeClient.CoreV1().Pods(wf.Namespace).Delete(ctx, podName, metav1.DeleteOptions{}) - if err != nil && !apierr.IsNotFound(err) { + wf, podsToDelete, err := util.FormulateRetryWorkflow(ctx, wf, req.RestartSuccessful, 
req.NodeFieldSelector, nil) + if err != nil { return nil, err } - } - wf, err = wfClient.ArgoprojV1alpha1().Workflows(req.Namespace).Update(ctx, wf, metav1.UpdateOptions{}) - if apierr.IsNotFound(err) { + for _, podName := range podsToDelete { + log.WithFields(log.Fields{"podDeleted": podName}).Info("Deleting pod") + err := kubeClient.CoreV1().Pods(wf.Namespace).Delete(ctx, podName, metav1.DeleteOptions{}) + if err != nil && !apierr.IsNotFound(err) { + return nil, err + } + } + wf.ObjectMeta.ResourceVersion = "" - wf, err = wfClient.ArgoprojV1alpha1().Workflows(req.Namespace).Create(ctx, wf, metav1.CreateOptions{}) + wf.ObjectMeta.UID = "" + result, err := wfClient.ArgoprojV1alpha1().Workflows(req.Namespace).Create(ctx, wf, metav1.CreateOptions{}) if err != nil { return nil, err } + + return result, nil + } + + if err == nil { + return nil, status.Error(codes.AlreadyExists, "Workflow already exists on cluster, use argo retry {name} instead") } - return wf, nil + return nil, err } diff --git a/server/workflowarchive/archived_workflow_server_test.go b/server/workflowarchive/archived_workflow_server_test.go index 973b46a2db40..387bebd28a4c 100644 --- a/server/workflowarchive/archived_workflow_server_test.go +++ b/server/workflowarchive/archived_workflow_server_test.go @@ -57,6 +57,7 @@ func Test_archivedWorkflowServer(t *testing.T) { repo.On("ListWorkflows", "", "my-name", "", minStartAt, maxStartAt, labels.Requirements(nil), 2, 0).Return(wfv1.Workflows{{}}, nil) repo.On("ListWorkflows", "", "", "my-", minStartAt, maxStartAt, labels.Requirements(nil), 2, 0).Return(wfv1.Workflows{{}}, nil) repo.On("ListWorkflows", "", "my-name", "my-", minStartAt, maxStartAt, labels.Requirements(nil), 2, 0).Return(wfv1.Workflows{{}}, nil) + repo.On("CountWorkflows", "", "my-name", "my-", minStartAt, maxStartAt, labels.Requirements(nil)).Return(int64(5), nil) repo.On("GetWorkflow", "").Return(nil, nil) repo.On("GetWorkflow", "my-uid").Return(&wfv1.Workflow{ ObjectMeta: 
metav1.ObjectMeta{Name: "my-name"}, @@ -154,6 +155,12 @@ func Test_archivedWorkflowServer(t *testing.T) { assert.Len(t, resp.Items, 1) assert.Empty(t, resp.Continue) } + resp, err = w.ListArchivedWorkflows(ctx, &workflowarchivepkg.ListArchivedWorkflowsRequest{ListOptions: &metav1.ListOptions{FieldSelector: "metadata.name=my-name,spec.startedAt>2020-01-01T00:00:00Z,spec.startedAt<2020-01-02T00:00:00Z,ext.showRemainingItemCount=true", Limit: 1}, NamePrefix: "my-"}) + if assert.NoError(t, err) { + assert.Len(t, resp.Items, 1) + assert.Equal(t, *resp.ListMeta.RemainingItemCount, int64(4)) + assert.Empty(t, resp.Continue) + } }) t.Run("GetArchivedWorkflow", func(t *testing.T) { allowed = false @@ -185,9 +192,8 @@ func Test_archivedWorkflowServer(t *testing.T) { assert.Len(t, resp.Items, 2) }) t.Run("RetryArchivedWorkflow", func(t *testing.T) { - wf, err := w.RetryArchivedWorkflow(ctx, &workflowarchivepkg.RetryArchivedWorkflowRequest{Uid: "failed-uid"}) - assert.NoError(t, err) - assert.NotNil(t, wf) + _, err := w.RetryArchivedWorkflow(ctx, &workflowarchivepkg.RetryArchivedWorkflowRequest{Uid: "failed-uid"}) + assert.Equal(t, err, status.Error(codes.AlreadyExists, "Workflow already exists on cluster, use argo retry {name} instead")) }) t.Run("ResubmitArchivedWorkflow", func(t *testing.T) { wf, err := w.ResubmitArchivedWorkflow(ctx, &workflowarchivepkg.ResubmitArchivedWorkflowRequest{Uid: "resubmit-uid", Memoized: false}) diff --git a/server/workflowtemplate/workflow_template_server.go b/server/workflowtemplate/workflow_template_server.go index 94bb996a43ec..90dc096665f9 100644 --- a/server/workflowtemplate/workflow_template_server.go +++ b/server/workflowtemplate/workflow_template_server.go @@ -33,7 +33,7 @@ func (wts *WorkflowTemplateServer) CreateWorkflowTemplate(ctx context.Context, r creator.Label(ctx, req.Template) wftmplGetter := templateresolution.WrapWorkflowTemplateInterface(wfClient.ArgoprojV1alpha1().WorkflowTemplates(req.Namespace)) cwftmplGetter := 
templateresolution.WrapClusterWorkflowTemplateInterface(wfClient.ArgoprojV1alpha1().ClusterWorkflowTemplates()) - _, err := validate.ValidateWorkflowTemplate(wftmplGetter, cwftmplGetter, req.Template, validate.ValidateOpts{}) + err := validate.ValidateWorkflowTemplate(wftmplGetter, cwftmplGetter, req.Template, validate.ValidateOpts{}) if err != nil { return nil, err } @@ -93,7 +93,7 @@ func (wts *WorkflowTemplateServer) LintWorkflowTemplate(ctx context.Context, req creator.Label(ctx, req.Template) wftmplGetter := templateresolution.WrapWorkflowTemplateInterface(wfClient.ArgoprojV1alpha1().WorkflowTemplates(req.Namespace)) cwftmplGetter := templateresolution.WrapClusterWorkflowTemplateInterface(wfClient.ArgoprojV1alpha1().ClusterWorkflowTemplates()) - _, err := validate.ValidateWorkflowTemplate(wftmplGetter, cwftmplGetter, req.Template, validate.ValidateOpts{Lint: true}) + err := validate.ValidateWorkflowTemplate(wftmplGetter, cwftmplGetter, req.Template, validate.ValidateOpts{Lint: true}) if err != nil { return nil, err } @@ -111,7 +111,7 @@ func (wts *WorkflowTemplateServer) UpdateWorkflowTemplate(ctx context.Context, r wfClient := auth.GetWfClient(ctx) wftmplGetter := templateresolution.WrapWorkflowTemplateInterface(wfClient.ArgoprojV1alpha1().WorkflowTemplates(req.Namespace)) cwftmplGetter := templateresolution.WrapClusterWorkflowTemplateInterface(wfClient.ArgoprojV1alpha1().ClusterWorkflowTemplates()) - _, err = validate.ValidateWorkflowTemplate(wftmplGetter, cwftmplGetter, req.Template, validate.ValidateOpts{}) + err = validate.ValidateWorkflowTemplate(wftmplGetter, cwftmplGetter, req.Template, validate.ValidateOpts{}) if err != nil { return nil, err } diff --git a/test/e2e/agent_test.go b/test/e2e/agent_test.go index 22de0aa3dd23..83dda64d3bee 100644 --- a/test/e2e/agent_test.go +++ b/test/e2e/agent_test.go @@ -37,19 +37,19 @@ spec: - - name: one template: http arguments: - parameters: [{name: url, value: "https://httpstat.us/200?sleep=5000"}] + parameters: 
[{name: url, value: "https://argoproj.github.io"}] - name: two template: http arguments: - parameters: [{name: url, value: "https://httpstat.us/200?sleep=5000"}] + parameters: [{name: url, value: "https://argoproj.github.io"}] - name: three template: http arguments: - parameters: [{name: url, value: "https://httpstat.us/200?sleep=5000"}] + parameters: [{name: url, value: "https://argoproj.github.io"}] - name: four template: http arguments: - parameters: [{name: url, value: "https://httpstat.us/200?sleep=5000"}] + parameters: [{name: url, value: "https://argoproj.github.io"}] - name: http inputs: parameters: @@ -59,18 +59,20 @@ spec: `). When(). SubmitWorkflow(). - WaitForWorkflow(time.Minute). + WaitForWorkflow(fixtures.ToBeCompleted). Then(). ExpectWorkflow(func(t *testing.T, _ *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { assert.Equal(t, wfv1.WorkflowSucceeded, status.Phase) - // Ensure that the workflow ran for less than 20 seconds (5 seconds per task, 4 tasks) - assert.True(t, status.FinishedAt.Sub(status.StartedAt.Time) < time.Duration(20)*time.Second) + // Ensure that the workflow ran for less than 10 seconds + assert.True(t, status.FinishedAt.Sub(status.StartedAt.Time) < time.Duration(10*fixtures.EnvFactor)*time.Second) var finishedTimes []time.Time + var startTimes []time.Time for _, node := range status.Nodes { if node.Type != wfv1.NodeTypeHTTP { continue } + startTimes = append(startTimes, node.StartedAt.Time) finishedTimes = append(finishedTimes, node.FinishedAt.Time) } @@ -78,10 +80,16 @@ spec: sort.Slice(finishedTimes, func(i, j int) bool { return finishedTimes[i].Before(finishedTimes[j]) }) - // Everything finished with a two second tolerance window assert.True(t, finishedTimes[3].Sub(finishedTimes[0]) < time.Duration(2)*time.Second) } + if assert.Len(t, startTimes, 4) { + sort.Slice(startTimes, func(i, j int) bool { + return startTimes[i].Before(startTimes[j]) + }) + // Everything started with same time + assert.True(t, 
startTimes[3].Sub(startTimes[0]) == 0) + } }) } diff --git a/test/e2e/argo_server_test.go b/test/e2e/argo_server_test.go index 403ee60b19a0..b5ba97751ece 100644 --- a/test/e2e/argo_server_test.go +++ b/test/e2e/argo_server_test.go @@ -9,6 +9,7 @@ import ( "fmt" "io/ioutil" "net/http" + "os" "strings" "testing" "time" @@ -51,7 +52,7 @@ func (s *ArgoServerSuite) e() *httpexpect.Expect { BaseURL: baseUrl, Reporter: httpexpect.NewRequireReporter(s.T()), Printers: []httpexpect.Printer{ - httpexpect.NewDebugPrinter(&httpLogger{}, true), + httpexpect.NewDebugPrinter(s.T(), true), }, Client: httpClient, }). @@ -105,21 +106,26 @@ func (s *ArgoServerSuite) TestMetricsForbidden() { } func (s *ArgoServerSuite) TestMetricsOK() { - s.e(). + body := s.e(). GET("/metrics"). Expect(). Status(200). - Body(). + Body() + body. // https://blog.netsil.com/the-4-golden-signals-of-api-health-and-performance-in-cloud-native-applications-a6e87526e74 // Latency: The time it takes to service a request, with a focus on distinguishing between the latency of successful requests and the latency of failed requests Contains(`grpc_server_handling_seconds_bucket`). // Traffic: A measure of how much demand is being placed on the service. This is measured using a high-level service-specific metric, like HTTP requests per second in the case of an HTTP REST API. Contains(`promhttp_metric_handler_requests_in_flight`). // Errors: The rate of requests that fail. The failures can be explicit (e.g., HTTP 500 errors) or implicit (e.g., an HTTP 200 OK response with a response body having too few items). - Contains(`promhttp_metric_handler_requests_total{code="500"}`). - // Saturation: How “full” is the service. This is a measure of the system utilization, emphasizing the resources that are most constrained (e.g., memory, I/O or CPU). Services degrade in performance as they approach high saturation. - Contains(`process_cpu_seconds_total`). 
- Contains(`process_resident_memory_bytes`) + Contains(`promhttp_metric_handler_requests_total{code="500"}`) + + if os.Getenv("CI") == "true" { + body. + // Saturation: How “full” is the service. This is a measure of the system utilization, emphasizing the resources that are most constrained (e.g., memory, I/O or CPU). Services degrade in performance as they approach high saturation. + Contains(`process_cpu_seconds_total`). + Contains(`process_resident_memory_bytes`) + } } func (s *ArgoServerSuite) TestSubmitWorkflowTemplateFromGithubWebhook() { @@ -854,7 +860,6 @@ func (s *ArgoServerSuite) TestWorkflowService() { }) s.Run("Terminate", func() { - s.Need(fixtures.None(fixtures.Kubelet)) s.e().PUT("/api/v1/workflows/argo/" + name + "/terminate"). Expect(). Status(200) @@ -873,7 +878,6 @@ func (s *ArgoServerSuite) TestWorkflowService() { }) s.Run("Resubmit", func() { - s.Need(fixtures.BaseLayerArtifacts) s.e().PUT("/api/v1/workflows/argo/" + name + "/resubmit"). WithBytes([]byte(`{"memoized": true}`)). Expect(). @@ -1053,13 +1057,107 @@ func (s *ArgoServerSuite) TestArtifactServer() { uid = metadata.UID }) + s.artifactServerRetrievalTests(name, uid) +} + +func (s *ArgoServerSuite) TestArtifactServerAzure() { + if os.Getenv("AZURE") != "true" { + s.T().Skip("AZURE must be true to run Azure Storage e2e tests") + } + var uid types.UID + var name string + s.Given(). + Workflow(`@testdata/artifact-workflow-azure.yaml`). + When(). + SubmitWorkflow(). + WaitForWorkflow(fixtures.ToBeArchived). + Then(). + ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { + name = metadata.Name + uid = metadata.UID + }) + + s.artifactServerRetrievalTests(name, uid) +} + +func (s *ArgoServerSuite) artifactServerRetrievalTests(name string, uid types.UID) { s.Run("GetArtifact", func() { - s.e().GET("/artifacts/argo/" + name + "/" + name + "/main-file"). + resp := s.e().GET("/artifacts/argo/" + name + "/" + name + "/main-file"). Expect(). - Status(200). 
- Body(). + Status(200) + + resp.Body(). + Contains(":) Hello Argo!") + + resp.Header("Content-Security-Policy"). + Equal("sandbox; base-uri 'none'; default-src 'none'; img-src 'self'; style-src 'self' 'unsafe-inline'") + + resp.Header("X-Frame-Options"). + Equal("SAMEORIGIN") + }) + + // In this case, the artifact name is a file + s.Run("GetArtifactFile", func() { + resp := s.e().GET("/artifact-files/argo/workflows/" + name + "/" + name + "/outputs/main-file"). + Expect(). + Status(200) + + resp.Body(). Contains(":) Hello Argo!") + + resp.Header("Content-Security-Policy"). + Equal("sandbox; base-uri 'none'; default-src 'none'; img-src 'self'; style-src 'self' 'unsafe-inline'") + + resp.Header("X-Frame-Options"). + Equal("SAMEORIGIN") }) + + // In this case, the artifact name is a directory + s.Run("GetArtifactFileDirectory", func() { + resp := s.e().GET("/artifact-files/argo/workflows/" + name + "/" + name + "/outputs/out/"). + Expect(). + Status(200) + + resp.Body(). + Contains("subdirectory/") + + }) + + // In this case, the filename specified in the request is actually a directory + s.Run("GetArtifactFileSubdirectory", func() { + resp := s.e().GET("/artifact-files/argo/workflows/" + name + "/" + name + "/outputs/out/subdirectory/"). + Expect(). + Status(200) + + resp.Body(). + Contains("sub-file-1"). + Contains("sub-file-2") + + }) + + // In this case, the filename specified in the request is a subdirectory file + s.Run("GetArtifactSubfile", func() { + resp := s.e().GET("/artifact-files/argo/workflows/" + name + "/" + name + "/outputs/out/subdirectory/sub-file-1"). + Expect(). + Status(200) + + resp.Body(). + Contains(":) Hello Argo!") + + resp.Header("Content-Security-Policy"). + Equal("sandbox; base-uri 'none'; default-src 'none'; img-src 'self'; style-src 'self' 'unsafe-inline'") + + resp.Header("X-Frame-Options"). 
+ Equal("SAMEORIGIN") + }) + + // In this case, the artifact name is a file + s.Run("GetArtifactBadFile", func() { + _ = s.e().GET("/artifact-files/argo/workflows/" + name + "/" + name + "/outputs/not-a-file"). + Expect(). + Status(500) + }) + s.Run("GetArtifactByUID", func() { s.e().DELETE("/api/v1/workflows/argo/" + name). Expect(). @@ -1082,6 +1180,14 @@ func (s *ArgoServerSuite) TestArtifactServer() { Expect(). Status(200) }) + + s.Run("GetArtifactFileByUID", func() { + s.e().GET("/artifact-files/argo/archived-workflows/{uid}/{name}/outputs/main-file", uid, name). + Expect(). + Status(200). + Body(). + Contains(":) Hello Argo!") + }) } func (s *ArgoServerSuite) stream(url string, f func(t *testing.T, line string) (done bool)) { @@ -1194,6 +1300,7 @@ spec: uid = metadata.UID }) var failedUid types.UID + var failedName string s.Given(). Workflow(` metadata: @@ -1214,6 +1321,7 @@ spec: Then(). ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { failedUid = metadata.UID + failedName = metadata.Name }) s.Given(). Workflow(` @@ -1324,17 +1432,27 @@ spec: NotNull() }) + s.Run("DeleteForRetry", func() { + s.e().DELETE("/api/v1/workflows/argo/" + failedName). + Expect(). + Status(200) + }) + s.Run("Retry", func() { s.e().PUT("/api/v1/archived-workflows/{uid}/retry", failedUid). WithBytes([]byte(`{"namespace": "argo"}`)). Expect(). Status(200). JSON(). + Path("$.metadata.name"). NotNull() + s.e().PUT("/api/v1/archived-workflows/{uid}/retry", failedUid). + WithBytes([]byte(`{"namespace": "argo"}`)). + Expect(). + Status(409) }) s.Run("Resubmit", func() { - s.Need(fixtures.BaseLayerArtifacts) s.e().PUT("/api/v1/archived-workflows/{uid}/resubmit", uid). WithBytes([]byte(`{"namespace": "argo", "memoized": false}`)). Expect(). 
@@ -1732,20 +1850,6 @@ func (s *ArgoServerSuite) TestEventSourcesService() { }) } -func (s *ArgoServerSuite) TestPipelineService() { - s.T().SkipNow() - s.Run("GetPipeline", func() { - s.e().GET("/api/v1/pipelines/argo/not-exists"). - Expect(). - Status(404) - }) - s.Run("ListPipelines", func() { - s.e().GET("/api/v1/pipelines/argo"). - Expect(). - Status(200) - }) -} - func (s *ArgoServerSuite) TestSensorService() { s.Run("CreateSensor", func() { s.e().POST("/api/v1/sensors/argo"). @@ -1875,6 +1979,19 @@ func (s *ArgoServerSuite) TestSensorService() { }) } +func (s *ArgoServerSuite) TestRateLimitHeader() { + s.Run("GetRateLimit", func() { + resp := s.e().GET("/api/v1/version"). + Expect(). + Status(200) + + resp.Header("X-RateLimit-Limit").NotEmpty() + resp.Header("X-RateLimit-Remaining").NotEmpty() + resp.Header("X-RateLimit-Reset").NotEmpty() + resp.Header("Retry-After").Empty() + }) +} + func TestArgoServerSuite(t *testing.T) { suite.Run(t, new(ArgoServerSuite)) } diff --git a/test/e2e/artifacts_test.go b/test/e2e/artifacts_test.go index a60396cdf493..dd5390f98289 100644 --- a/test/e2e/artifacts_test.go +++ b/test/e2e/artifacts_test.go @@ -4,12 +4,17 @@ package e2e import ( + "fmt" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "github.com/argoproj/argo-workflows/v3/workflow/common" + + "github.com/minio/minio-go/v7" + wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" "github.com/argoproj/argo-workflows/v3/test/e2e/fixtures" ) @@ -36,7 +41,6 @@ func (s *ArtifactsSuite) TestOutputOnMount() { } func (s *ArtifactsSuite) TestOutputOnInput() { - s.Need(fixtures.BaseLayerArtifacts) // I believe this would work on both K8S and Kubelet, but validation does not allow it s.Given(). Workflow("@testdata/output-on-input-workflow.yaml"). When(). 
@@ -45,7 +49,6 @@ func (s *ArtifactsSuite) TestOutputOnInput() { } func (s *ArtifactsSuite) TestArtifactPassing() { - s.Need(fixtures.BaseLayerArtifacts) s.Given(). Workflow("@smoke/artifact-passing.yaml"). When(). @@ -53,8 +56,129 @@ func (s *ArtifactsSuite) TestArtifactPassing() { WaitForWorkflow(fixtures.ToBeSucceeded) } +type artifactState struct { + key string + bucketName string + deletedAtWFCompletion bool + deletedAtWFDeletion bool +} + +func (s *ArtifactsSuite) TestArtifactGC() { + + s.Given(). + WorkflowTemplate("@testdata/artifactgc/artgc-template.yaml"). + When(). + CreateWorkflowTemplates() + + for _, tt := range []struct { + workflowFile string + expectedArtifacts []artifactState + expectedGCPodsOnWFCompletion int + }{ + { + workflowFile: "@testdata/artifactgc/artgc-multi-strategy-multi-anno.yaml", + expectedGCPodsOnWFCompletion: 2, + expectedArtifacts: []artifactState{ + artifactState{"first-on-completion-1", "my-bucket-2", true, false}, + artifactState{"first-on-completion-2", "my-bucket-3", true, false}, + artifactState{"first-no-deletion", "my-bucket-3", false, false}, + artifactState{"second-on-deletion", "my-bucket-3", false, true}, + artifactState{"second-on-completion", "my-bucket-2", true, false}, + }, + }, + { + workflowFile: "@testdata/artifactgc/artgc-from-template.yaml", + expectedGCPodsOnWFCompletion: 1, + expectedArtifacts: []artifactState{ + artifactState{"on-completion", "my-bucket-2", true, false}, + artifactState{"on-deletion", "my-bucket-2", false, true}, + }, + }, + { + workflowFile: "@testdata/artifactgc/artgc-step-wf-tmpl.yaml", + expectedGCPodsOnWFCompletion: 1, + expectedArtifacts: []artifactState{ + artifactState{"on-completion", "my-bucket-2", true, false}, + artifactState{"on-deletion", "my-bucket-2", false, true}, + }, + }, + } { + // for each test make sure that: + // 1. the finalizer gets added + // 2. the artifacts are deleted at the right time + // 3. 
the finalizer gets removed after all artifacts are deleted + // (note that in order to verify that the finalizer has been added once the Workflow's been submitted, + // we need it to still be there after being submitted, so each of the following tests includes at least one + // 'OnWorkflowDeletion' strategy) + + when := s.Given(). + Workflow(tt.workflowFile). + When(). + SubmitWorkflow() + when. + WaitForWorkflow(fixtures.ToBeCompleted). + Then(). + ExpectWorkflow(func(t *testing.T, objectMeta *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { + assert.Contains(t, objectMeta.Finalizers, common.FinalizerArtifactGC) + }) + + // wait for all pods to have started and been completed and recouped + when. + WaitForWorkflow( + fixtures.WorkflowCompletionOkay(true), + fixtures.Condition(func(wf *wfv1.Workflow) (bool, string) { + return len(wf.Status.ArtifactGCStatus.PodsRecouped) >= tt.expectedGCPodsOnWFCompletion, + fmt.Sprintf("for all %d pods to have been recouped", tt.expectedGCPodsOnWFCompletion) + })) + + then := when.Then() + + // verify that the artifacts that should have been deleted at completion time were + for _, expectedArtifact := range tt.expectedArtifacts { + if expectedArtifact.deletedAtWFCompletion { + fmt.Printf("verifying artifact %s is deleted at completion time\n", expectedArtifact.key) + then.ExpectArtifactByKey(expectedArtifact.key, expectedArtifact.bucketName, func(t *testing.T, object minio.ObjectInfo, err error) { + assert.NotNil(t, err) + }) + } else { + fmt.Printf("verifying artifact %s is not deleted at completion time\n", expectedArtifact.key) + then.ExpectArtifactByKey(expectedArtifact.key, expectedArtifact.bucketName, func(t *testing.T, object minio.ObjectInfo, err error) { + assert.Nil(t, err) + }) + } + } + + fmt.Println("deleting workflow; verifying that Artifact GC finalizer gets removed") + + when. + DeleteWorkflow(). 
+ WaitForWorkflowDeletion() + + when = when.RemoveFinalizers(false) // just in case - if the above test failed we need to forcibly remove the finalizer for Artifact GC + + then = when.Then() + + for _, expectedArtifact := range tt.expectedArtifacts { + + if expectedArtifact.deletedAtWFCompletion { // already checked this + continue + } + if expectedArtifact.deletedAtWFDeletion { + fmt.Printf("verifying artifact %s is deleted\n", expectedArtifact.key) + then.ExpectArtifactByKey(expectedArtifact.key, expectedArtifact.bucketName, func(t *testing.T, object minio.ObjectInfo, err error) { + assert.NotNil(t, err) + }) + } else { + fmt.Printf("verifying artifact %s is not deleted\n", expectedArtifact.key) + then.ExpectArtifactByKey(expectedArtifact.key, expectedArtifact.bucketName, func(t *testing.T, object minio.ObjectInfo, err error) { + assert.Nil(t, err) + }) + } + } + } +} + func (s *ArtifactsSuite) TestDefaultParameterOutputs() { - s.Need(fixtures.BaseLayerArtifacts) s.Given(). Workflow(` apiVersion: argoproj.io/v1alpha1 @@ -107,7 +231,6 @@ spec: } func (s *ArtifactsSuite) TestSameInputOutputPathOptionalArtifact() { - s.Need(fixtures.BaseLayerArtifacts) s.Given(). Workflow("@testdata/same-input-output-path-optional.yaml"). When(). @@ -139,14 +262,10 @@ func (s *ArtifactsSuite) TestMainLog() { SubmitWorkflow(). WaitForWorkflow(fixtures.ToBeSucceeded). Then(). - ExpectWorkflow(func(t *testing.T, m *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { - n := status.Nodes[m.Name] - if assert.NotNil(t, n) { - assert.Len(t, n.Outputs.Artifacts, 1) - } + ExpectArtifact("-", "main-logs", "my-bucket", func(t *testing.T, object minio.ObjectInfo, err error) { + assert.NoError(t, err) }) }) - s.Need(fixtures.None(fixtures.Docker, fixtures.Kubelet)) s.Run("ActiveDeadlineSeconds", func() { s.Given(). Workflow("@expectedfailures/timeouts-step.yaml"). @@ -154,10 +273,59 @@ func (s *ArtifactsSuite) TestMainLog() { SubmitWorkflow(). WaitForWorkflow(fixtures.ToBeFailed). Then(). 
+ ExpectArtifact("-", "main-logs", "my-bucket", func(t *testing.T, object minio.ObjectInfo, err error) { + assert.NoError(t, err) + }) + }) +} + +func (s *ArtifactsSuite) TestContainersetLogs() { + s.Run("Basic", func() { + s.Given(). + Workflow(` +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: containerset-logs- +spec: + entrypoint: main + templates: + - name: main + containerSet: + containers: + - name: a + image: argoproj/argosay:v2 + - name: b + image: argoproj/argosay:v2 +`). + When(). + SubmitWorkflow(). + WaitForWorkflow(fixtures.ToBeSucceeded). + Then(). ExpectWorkflow(func(t *testing.T, m *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { n := status.Nodes[m.Name] - if assert.NotNil(t, n.Outputs) { - assert.Len(t, n.Outputs.Artifacts, 1) + expectedOutputs := &wfv1.Outputs{ + Artifacts: wfv1.Artifacts{ + { + Name: "a-logs", + ArtifactLocation: wfv1.ArtifactLocation{ + S3: &wfv1.S3Artifact{ + Key: fmt.Sprintf("%s/%s/a.log", m.Name, m.Name), + }, + }, + }, + { + Name: "b-logs", + ArtifactLocation: wfv1.ArtifactLocation{ + S3: &wfv1.S3Artifact{ + Key: fmt.Sprintf("%s/%s/b.log", m.Name, m.Name), + }, + }, + }, + }, + } + if assert.NotNil(t, n) { + assert.Equal(t, n.Outputs, expectedOutputs) } }) }) diff --git a/test/e2e/azure/deploy-azurite.yaml b/test/e2e/azure/deploy-azurite.yaml new file mode 100644 index 000000000000..26adefe75a38 --- /dev/null +++ b/test/e2e/azure/deploy-azurite.yaml @@ -0,0 +1,86 @@ +--- +apiVersion: v1 +kind: Secret +metadata: + name: azurite-credentials +type: Opaque +data: + # Well-known access key for Azurite (see https://docs.microsoft.com/en-us/azure/storage/common/storage-use-azurite?tabs=docker-hub#well-known-storage-account-and-key) + account-access-key: RWJ5OHZkTTAyeE5PY3FGbHFVd0pQTGxtRXRsQ0RYSjFPVXpGVDUwdVNSWjZJRnN1RnEyVVZFckN6NEk2dHEvSzFTWkZQVE90ci9LQkhCZWtzb0dNR3c9PQ== +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: azure-artifact-repositories +data: + azure-v1: | + archiveLogs: true + 
azure: + endpoint: http://azurite:10000/azurite + container: my-container + accountKeySecret: + name: azurite-credentials + key: account-access-key +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: azurite +spec: + replicas: 1 + selector: + matchLabels: + app: azurite + template: + metadata: + labels: + app: azurite + spec: + terminationGracePeriodSeconds: 10 + containers: + - name: azurite + image: mcr.microsoft.com/azure-storage/azurite + args: + - azurite-blob + - --blobHost + - 0.0.0.0 + ports: + - containerPort: 10000 + protocol: TCP + env: + - name: AZURITE_ACCOUNT_KEY + valueFrom: + secretKeyRef: + name: azurite-credentials + key: account-access-key + - name: AZURITE_ACCOUNTS + value: "azurite:$(AZURITE_ACCOUNT_KEY)" + - name: create-storage-container + image: mcr.microsoft.com/azure-cli + command: [ "/bin/bash", "-c" ] + args: + - until az storage container show -n my-container --blob-endpoint http://localhost:10000/azurite --account-key $AZURE_ACCOUNT_KEY -onone; do + result=$(az storage container create -n my-container --blob-endpoint http://localhost:10000/azurite --account-key $AZURE_ACCOUNT_KEY -otsv); + [[ "$result" == "True" ]] && { echo "Storage container was created!"; break; }; + echo "Waiting for azurite container to start..."; + sleep 2; + done; + while true; do sleep infinity; done + env: + - name: AZURE_ACCOUNT_KEY + valueFrom: + secretKeyRef: + name: azurite-credentials + key: account-access-key +--- +apiVersion: v1 +kind: Service +metadata: + name: azurite +spec: + selector: + app: azurite + ports: + - port: 10000 + targetPort: 10000 + protocol: TCP diff --git a/test/e2e/cli_test.go b/test/e2e/cli_test.go index 850494246146..4a67ce57861e 100644 --- a/test/e2e/cli_test.go +++ b/test/e2e/cli_test.go @@ -225,6 +225,9 @@ func (s *CLISuite) TestSubmitServerDryRun() { } func (s *CLISuite) TestTokenArg() { + if os.Getenv("CI") != "true" { + s.T().Skip("we only set-up the KUBECONFIG on CI") + } s.setMode(KUBE) s.Run("ListWithBadToken", 
func() { s.Given().RunCli([]string{"list", "--user", "fake_token_user", "--token", "badtoken"}, func(t *testing.T, output string, err error) { @@ -864,6 +867,29 @@ func (s *CLISuite) TestWorkflowRetry() { }) } +func (s *CLISuite) TestWorkflowRetryNestedDag() { + s.Given(). + Workflow("@testdata/retry-nested-dag-test.yaml"). + When(). + SubmitWorkflow(). + WaitForWorkflow(fixtures.ToBeFailed). + Then(). + RunCli([]string{"retry", "retry-nested-dag", "--restart-successful", "--node-field-selector", "name=retry-nested-dag.dag1-step2.dag2-step1.dag3-step1"}, func(t *testing.T, output string, err error) { + if assert.NoError(t, err, output) { + assert.Contains(t, output, "Name:") + assert.Contains(t, output, "Namespace:") + } + }). + When(). + WaitForWorkflow(fixtures.ToBeFailed). + Then(). + ExpectWorkflow(func(t *testing.T, _ *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { + assert.Equal(t, wfv1.NodeSucceeded, status.Nodes.FindByDisplayName("dag3-step3").Phase) + assert.Equal(t, wfv1.NodeSucceeded, status.Nodes.FindByDisplayName("dag3-step2").Phase) + assert.Equal(t, wfv1.NodeSucceeded, status.Nodes.FindByDisplayName("dag3-step1").Phase) + }) +} + func (s *CLISuite) TestWorkflowStop() { s.Given(). Workflow("@smoke/basic.yaml"). @@ -1329,6 +1355,88 @@ func (s *CLISuite) TestWorkflowTemplateRefSubmit() { }) } +func (s *CLISuite) TestWorkflowCopyArtifact() { + s.workflowCopyArtifactTests("basic-artifact-workflow.yaml") +} + +func (s *CLISuite) TestWorkflowCopyArtifactAzure() { + if os.Getenv("AZURE") != "true" { + s.T().Skip("AZURE must be true to run Azure Storage e2e tests") + } + + s.workflowCopyArtifactTests("basic-artifact-workflow-azure.yaml") +} + +func (s *CLISuite) workflowCopyArtifactTests(workflowFileName string) { + s.Given(). + Workflow(fmt.Sprintf("@testdata/%s", workflowFileName)). + When(). + SubmitWorkflow(). + WaitForWorkflow(). + Given(). 
+ RunCli([]string{"cp", "@latest", "outputDir"}, func(t *testing.T, output string, err error) { + if assert.NoError(t, err) { + assert.Contains(t, output, "Created \"main.log\"") + assert.Contains(t, output, "Created \"hello_world.tgz\"") + assert.Contains(t, output, "Created \"bye_world.tgz\"") + } + }) + os.RemoveAll("outputDir") + + s.Given(). + Workflow(fmt.Sprintf("@testdata/%s", workflowFileName)). + When(). + SubmitWorkflow(). + WaitForWorkflow(). + Given(). + RunCli([]string{"cp", "@latest", "outputDir", "--template-name", "bye"}, func(t *testing.T, output string, err error) { + if assert.NoError(t, err) { + assert.Contains(t, output, "Created \"main.log\"") + assert.Contains(t, output, "Created \"bye_world.tgz\"") + assert.NotContains(t, output, "Created \"hello_world.tgz\"") + } + }) + os.RemoveAll("outputDir") + + s.Given(). + Workflow(fmt.Sprintf("@testdata/%s", workflowFileName)). + When(). + SubmitWorkflow(). + WaitForWorkflow(). + Given(). + RunCli([]string{"cp", "@latest", "outputDir", "--artifact-name", "hello_world"}, func(t *testing.T, output string, err error) { + if assert.NoError(t, err) { + assert.NotContains(t, output, "Created \"main.log\"") + assert.NotContains(t, output, "Created \"bye_world.tgz\"") + assert.Contains(t, output, "Created \"hello_world.tgz\"") + } + }) + os.RemoveAll("outputDir") + + s.Given(). + Workflow(fmt.Sprintf("@testdata/%s", workflowFileName)). + When(). + SubmitWorkflow(). + WaitForWorkflow(). + Given(). 
+ RunCli([]string{"cp", "@latest", ".", "--path", "/{templateName}/{artifactName}/"}, func(t *testing.T, output string, err error) { + if assert.NoError(t, err) { + //Assert everything was stored + assert.Contains(t, output, "Created \"main.log\"") + assert.Contains(t, output, "Created \"bye_world.tgz\"") + assert.Contains(t, output, "Created \"hello_world.tgz\"") + //Assert filepaths are correct + statStrip := func(f os.FileInfo, err error) error { + return err + } + assert.NoError(t, statStrip(os.Stat("bye/bye_world/bye_world.tgz"))) + assert.NoError(t, statStrip(os.Stat("hello/hello_world/hello_world.tgz"))) + } + }) + os.RemoveAll("bye") + os.RemoveAll("hello") +} + func (s *CLISuite) TestRetryOmit() { s.Given(). Workflow("@testdata/retry-omit.yaml"). diff --git a/test/e2e/expectedfailures/failed-step-event.yaml b/test/e2e/expectedfailures/failed-step-event.yaml index 46aa9e9104ad..f99460b0bc3a 100644 --- a/test/e2e/expectedfailures/failed-step-event.yaml +++ b/test/e2e/expectedfailures/failed-step-event.yaml @@ -11,4 +11,4 @@ spec: - name: exit container: image: argoproj/argosay:v2 - args: [exit, 1] + args: ["exit", "1"] diff --git a/test/e2e/expectedfailures/large-workflow.yaml b/test/e2e/expectedfailures/large-workflow.yaml index 646678ee4fee..a052da4057f3 100644 --- a/test/e2e/expectedfailures/large-workflow.yaml +++ b/test/e2e/expectedfailures/large-workflow.yaml @@ -13,7 +13,7 @@ spec: kind: ConfigMap metadata: name: test - namespace: default + namespace: argo data: keys: > gEPqig/hsFDwFO/Xg/ibItxzl5dDsbITZ172yn5eDJO2KR4NOV3Pc0jaDr+9WScAVp/nGdGTR6Si diff --git a/test/e2e/fixtures/e2e_suite.go b/test/e2e/fixtures/e2e_suite.go index 26e8407a462c..e1435fed1add 100644 --- a/test/e2e/fixtures/e2e_suite.go +++ b/test/e2e/fixtures/e2e_suite.go @@ -8,7 +8,7 @@ import ( "strings" "time" - "github.com/TwinProduction/go-color" + "github.com/TwiN/go-color" "github.com/stretchr/testify/suite" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" 
"k8s.io/apimachinery/pkg/labels" @@ -36,7 +36,8 @@ const ( Label = workflow.WorkflowFullName + "/test" // mark this workflow as a test ) -var defaultTimeout = env.LookupEnvDurationOr("E2E_TIMEOUT", 30*time.Second) +var defaultTimeout = env.LookupEnvDurationOr("E2E_WAIT_TIMEOUT", 60*time.Second) +var EnvFactor = env.LookupEnvIntOr("E2E_ENV_FACTOR", 1) type E2ESuite struct { suite.Suite @@ -60,12 +61,12 @@ func (s *E2ESuite) SetupSuite() { s.CheckError(err) s.KubeClient, err = kubernetes.NewForConfig(s.RestConfig) s.CheckError(err) - configController := config.NewController(Namespace, "workflow-controller-configmap", s.KubeClient, config.EmptyConfigFunc) + configController := config.NewController(Namespace, common.ConfigMapName, s.KubeClient) ctx := context.Background() c, err := configController.Get(ctx) s.CheckError(err) - s.Config = c.(*config.Config) + s.Config = c s.wfClient = versioned.NewForConfigOrDie(s.RestConfig).ArgoprojV1alpha1().Workflows(Namespace) s.wfebClient = versioned.NewForConfigOrDie(s.RestConfig).ArgoprojV1alpha1().WorkflowEventBindings(Namespace) s.wfTemplateClient = versioned.NewForConfigOrDie(s.RestConfig).ArgoprojV1alpha1().WorkflowTemplates(Namespace) @@ -80,6 +81,9 @@ func (s *E2ESuite) TearDownSuite() { for _, x := range s.slowTests { _, _ = fmt.Println(color.Ize(color.Yellow, fmt.Sprintf("=== SLOW TEST: %s", x))) } + if s.T().Failed() { + s.T().Log("to learn how to diagnose failed tests: https://argoproj.github.io/argo-workflows/running-locally/#running-e2e-tests-locally") + } } func (s *E2ESuite) BeforeTest(string, string) { @@ -154,15 +158,6 @@ func (s *E2ESuite) DeleteResources() { } } -func (s *E2ESuite) Need(needs ...Need) { - for _, n := range needs { - met, message := n(s) - if !met { - s.T().Skip("unmet need: " + message) - } - } -} - func (s *E2ESuite) dynamicFor(r schema.GroupVersionResource) dynamic.ResourceInterface { resourceInterface := dynamic.NewForConfigOrDie(s.RestConfig).Resource(r) if r.Resource == 
workflow.ClusterWorkflowTemplatePlural { diff --git a/test/e2e/fixtures/given.go b/test/e2e/fixtures/given.go index 58e5694983b8..2b4fae31bc26 100644 --- a/test/e2e/fixtures/given.go +++ b/test/e2e/fixtures/given.go @@ -7,7 +7,7 @@ import ( "strings" "testing" - "github.com/TwinProduction/go-color" + "github.com/TwiN/go-color" "github.com/stretchr/testify/assert" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/client-go/kubernetes" @@ -43,9 +43,6 @@ func (g *Given) Workflow(text string) *Given { g.t.Helper() g.wf = &wfv1.Workflow{} g.readResource(text, g.wf) - if g.wf.Name != "" { - g.t.Fatalf("workflow %q, but should use generate name", text) - } g.checkImages(g.wf.Spec.Templates) return g } diff --git a/test/e2e/fixtures/needs.go b/test/e2e/fixtures/needs.go deleted file mode 100644 index 811a37a68244..000000000000 --- a/test/e2e/fixtures/needs.go +++ /dev/null @@ -1,67 +0,0 @@ -package fixtures - -import ( - "os" -) - -type Need func(s *E2ESuite) (met bool, message string) - -var ( - CI Need = func(s *E2ESuite) (bool, string) { - return os.Getenv("CI") != "", "CI" - } - BaseLayerArtifacts Need = func(s *E2ESuite) (bool, string) { - met, _ := None(Kubelet)(s) - return met, "base layer artifact support" - } - Docker = Executor("docker") - Emissary = Executor("emissary") - Kubelet = Executor("kubelet") - PNS = Executor("pns") -) - -func Executor(e string) Need { - return func(s *E2ESuite) (bool, string) { - v := s.Config.ContainerRuntimeExecutor - if v == "" { - v = "emissary" - } - return v == e, e - } -} - -func None(needs ...Need) Need { - return func(s *E2ESuite) (bool, string) { - for _, n := range needs { - met, message := n(s) - if met { - return false, "not " + message - } - } - return true, "" - } -} - -func Any(needs ...Need) Need { - return func(s *E2ESuite) (bool, string) { - for _, n := range needs { - met, _ := n(s) - if met { - return true, "" - } - } - return false, "" - } -} - -func All(needs ...Need) Need { - return func(s *E2ESuite) (bool, 
string) { - for _, n := range needs { - met, message := n(s) - if !met { - return false, message - } - } - return true, "" - } -} diff --git a/test/e2e/fixtures/print.go b/test/e2e/fixtures/print.go index c219257fcfd3..e10ceb7e91c4 100644 --- a/test/e2e/fixtures/print.go +++ b/test/e2e/fixtures/print.go @@ -6,7 +6,7 @@ import ( "sort" "text/tabwriter" - "github.com/TwinProduction/go-color" + "github.com/TwiN/go-color" wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" ) diff --git a/test/e2e/fixtures/then.go b/test/e2e/fixtures/then.go index 872fd56c6296..f52bdfb19b4f 100644 --- a/test/e2e/fixtures/then.go +++ b/test/e2e/fixtures/then.go @@ -3,8 +3,6 @@ package fixtures import ( "context" "fmt" - "io/ioutil" - "net/http" "reflect" "testing" "time" @@ -15,6 +13,9 @@ import ( "k8s.io/apimachinery/pkg/types" "k8s.io/client-go/kubernetes" + "github.com/minio/minio-go/v7" + "github.com/minio/minio-go/v7/pkg/credentials" + wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" "github.com/argoproj/argo-workflows/v3/pkg/client/clientset/versioned/typed/workflow/v1alpha1" "github.com/argoproj/argo-workflows/v3/workflow/common" @@ -184,29 +185,33 @@ func (t *Then) ExpectAuditEvents(filter func(event apiv1.Event) bool, num int, b return t } -func (t *Then) ExpectArtifact(nodeName, artifactName string, f func(t *testing.T, data []byte)) { +func (t *Then) ExpectArtifact(nodeName string, artifactName string, bucketName string, f func(t *testing.T, object minio.ObjectInfo, err error)) { t.t.Helper() - nodeId := nodeIdForName(nodeName, t.wf) - url := "http://localhost:2746/artifacts/" + Namespace + "/" + t.wf.Name + "/" + nodeId + "/" + artifactName - println(url) - req, err := http.NewRequest("GET", url, nil) - if err != nil { - t.t.Fatal(err) - } - req.Header.Set("Authorization", "Bearer "+t.bearerToken) - resp, err := http.DefaultClient.Do(req) - if err != nil { - t.t.Fatal(err) + + if nodeName == "-" { + nodeName = t.wf.Name } - defer 
resp.Body.Close() - data, err := ioutil.ReadAll(resp.Body) + + n := t.wf.GetNodeByName(nodeName) + a := n.GetOutputs().GetArtifactByName(artifactName) + key, _ := a.GetKey() + + t.ExpectArtifactByKey(key, bucketName, f) +} + +func (t *Then) ExpectArtifactByKey(key string, bucketName string, f func(t *testing.T, object minio.ObjectInfo, err error)) { + t.t.Helper() + + c, err := minio.New("localhost:9000", &minio.Options{ + Creds: credentials.NewStaticV4("admin", "password", ""), + }) + if err != nil { - t.t.Fatal(err) - } - if resp.StatusCode != 200 { - t.t.Fatal(fmt.Errorf("HTTP request not OK: %s: %q", resp.Status, data)) + t.t.Error(err) } - f(t.t, data) + + object, err := c.StatObject(context.Background(), bucketName, key, minio.StatObjectOptions{}) + f(t.t, object, err) } func (t *Then) ExpectPods(f func(t *testing.T, pods []apiv1.Pod)) *Then { @@ -222,14 +227,6 @@ func (t *Then) ExpectPods(f func(t *testing.T, pods []apiv1.Pod)) *Then { return t } -func nodeIdForName(nodeName string, wf *wfv1.Workflow) string { - if nodeName == "-" { - return wf.NodeID(wf.Name) - } else { - return wf.NodeID(nodeName) - } -} - func (t *Then) RunCli(args []string, block func(t *testing.T, output string, err error)) *Then { t.t.Helper() output, err := Exec("../../dist/argo", append([]string{"-n", Namespace}, args...)...) 
diff --git a/test/e2e/fixtures/when.go b/test/e2e/fixtures/when.go index 67222e6936eb..cce99669bf74 100644 --- a/test/e2e/fixtures/when.go +++ b/test/e2e/fixtures/when.go @@ -233,12 +233,16 @@ var ToBeWaitingOnAMutex Condition = func(wf *wfv1.Workflow) (bool, string) { return wf.Status.Synchronization != nil && wf.Status.Synchronization.Mutex != nil, "to be waiting on a mutex" } +type WorkflowCompletionOkay bool + // Wait for a workflow to meet a condition: // Options: // * `time.Duration` - change the timeout - 30s by default // * `string` - either: // * the workflow's name (not spaces) // * or a new message (if it contain spaces) - default "to finish" +// * `WorkflowCompletionOkay`` (bool alias): if this is true, we won't stop checking for the other options +// * just because the Workflow completed // * `Condition` - a condition - `ToFinish` by default func (w *When) WaitForWorkflow(options ...interface{}) *When { w.t.Helper() @@ -248,6 +252,7 @@ func (w *When) WaitForWorkflow(options ...interface{}) *When { workflowName = w.wf.Name } condition := ToBeDone + var workflowCompletionOkay WorkflowCompletionOkay for _, opt := range options { switch v := opt.(type) { case time.Duration: @@ -256,6 +261,8 @@ func (w *When) WaitForWorkflow(options ...interface{}) *When { workflowName = v case Condition: condition = v + case WorkflowCompletionOkay: + workflowCompletionOkay = v default: w.t.Fatal("unknown option type: " + reflect.TypeOf(opt).String()) } @@ -294,11 +301,13 @@ func (w *When) WaitForWorkflow(options ...interface{}) *When { w.wf = wf return w } - // once done the workflow is done, the condition can never be met - // rather than wait maybe 30s for something that can never happen - if ok, _ = ToBeDone(wf); ok { - w.t.Errorf("condition never and cannot be met because the workflow is done") - return w + if !workflowCompletionOkay { + // once done the workflow is done, the condition can never be met + // rather than wait maybe 30s for something that can never happen 
+ if ok, _ = ToBeDone(wf); ok { + w.t.Errorf("condition never and cannot be met because the workflow is done") + return w + } } } else { w.t.Errorf("not ok") @@ -337,6 +346,14 @@ func (w *When) WaitForWorkflowList(listOptions metav1.ListOptions, condition fun } } +func (w *When) WaitForWorkflowDeletion() *When { + fieldSelector := "metadata.name=" + w.wf.Name + opts := metav1.ListOptions{LabelSelector: Label, FieldSelector: fieldSelector} + return w.WaitForWorkflowList(opts, func(list []wfv1.Workflow) bool { + return len(list) == 0 + }) +} + func (w *When) hydrateWorkflow(wf *wfv1.Workflow) { w.t.Helper() err := w.hydrator.Hydrate(wf) @@ -366,6 +383,17 @@ func (w *When) DeleteWorkflow() *When { return w } +func (w *When) RemoveFinalizers(shouldErr bool) *When { + w.t.Helper() + ctx := context.Background() + + _, err := w.client.Patch(ctx, w.wf.Name, types.MergePatchType, []byte("{\"metadata\":{\"finalizers\":null}}"), metav1.PatchOptions{}) + if err != nil && shouldErr { + w.t.Fatal(err) + } + return w +} + type PodCondition func(p *corev1.Pod) bool var ( diff --git a/test/e2e/functional/stop-terminate-daemon.yaml b/test/e2e/functional/stop-terminate-daemon.yaml new file mode 100644 index 000000000000..410b2528145e --- /dev/null +++ b/test/e2e/functional/stop-terminate-daemon.yaml @@ -0,0 +1,26 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: stop-terminate-daemon- +spec: + entrypoint: main + templates: + - name: main + steps: + - - name: Daemon + template: sleep + - - name: B + template: pass + + - name: sleep + daemon: true + container: + image: argoproj/argosay:v1 + command: [ sleep ] + args: [ "999" ] + + - name: pass + container: + image: argoproj/argosay:v1 + command: [ sleep ] + args: [ "999" ] \ No newline at end of file diff --git a/test/e2e/functional_test.go b/test/e2e/functional_test.go index 8a355e714542..defac6d78af7 100644 --- a/test/e2e/functional_test.go +++ b/test/e2e/functional_test.go @@ -1,11 +1,10 @@ -//go:build 
functional -// +build functional +//go:build corefunctional +// +build corefunctional package e2e import ( "context" - "strings" "testing" "time" @@ -25,7 +24,6 @@ type FunctionalSuite struct { } func (s *FunctionalSuite) TestArchiveStrategies() { - s.Need(fixtures.BaseLayerArtifacts) s.Given(). Workflow(`@testdata/archive-strategies.yaml`). When(). @@ -46,7 +44,7 @@ func (s *FunctionalSuite) TestDeletingPendingPod() { SubmitWorkflow(). WaitForWorkflow(fixtures.ToStart). Exec("kubectl", []string{"-n", "argo", "delete", "pod", "-l", "workflows.argoproj.io/workflow"}, fixtures.OutputRegexp(`pod "pending-.*" deleted`)). - Wait(3*time.Second). // allow 3s for reconciliation, we'll create a new pod + Wait(time.Duration(3*fixtures.EnvFactor)*time.Second). // allow 3s for reconciliation, we'll create a new pod Exec("kubectl", []string{"-n", "argo", "get", "pod", "-l", "workflows.argoproj.io/workflow"}, fixtures.OutputRegexp(`pending-.*Pending`)) } @@ -270,84 +268,6 @@ func (s *FunctionalSuite) TestVolumeClaimTemplate() { }) } -func (s *FunctionalSuite) TestEventOnNodeFailSentAsPod() { - // Test whether an WorkflowFailed event (with appropriate message) is emitted in case of node failure - var uid types.UID - var nodeId types.UID - var nodeName string - // Update controller config map to set nodeEvents.sendAsPod to true - ctx := context.Background() - configMap, err := s.KubeClient.CoreV1().ConfigMaps(fixtures.Namespace).Get( - ctx, - "workflow-controller-configmap", - metav1.GetOptions{}, - ) - if err != nil { - s.T().Fatal(err) - } - originalData := make(map[string]string) - for key, value := range configMap.Data { - originalData[key] = value - } - configMap.Data["nodeEvents"] = "\n sendAsPod: true" - s.Given(). - Workflow("@expectedfailures/failed-step-event.yaml"). - When(). - UpdateConfigMap( - "workflow-controller-configmap", - configMap.Data, - map[string]string{}). - // Give controller enough time to update from config map change - Wait(5*time.Second). 
- SubmitWorkflow(). - WaitForWorkflow(). - Then(). - ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { - uid = metadata.UID - }). - ExpectWorkflowNode(func(status wfv1.NodeStatus) bool { - return strings.HasPrefix(status.Name, "failed-step-event-") - }, func(t *testing.T, status *wfv1.NodeStatus, pod *apiv1.Pod) { - nodeId = pod.UID - nodeName = status.Name - }). - ExpectAuditEvents( - func(event apiv1.Event) bool { - return (event.InvolvedObject.Kind == workflow.WorkflowKind && event.InvolvedObject.UID == uid) || (event.InvolvedObject.Kind == "Pod" && event.InvolvedObject.UID == nodeId && strings.HasPrefix(event.Reason, "Workflow")) - }, - 4, - func(t *testing.T, es []apiv1.Event) { - for _, e := range es { - switch e.Reason { - case "WorkflowNodeRunning": - assert.Equal(t, e.InvolvedObject.Kind, "Pod") - assert.Contains(t, e.Message, "Running node failed-step-event-") - assert.Equal(t, e.Annotations["workflows.argoproj.io/node-name"], nodeName) - assert.Equal(t, e.Annotations["workflows.argoproj.io/workflow-uid"], string(uid)) - assert.Contains(t, e.Annotations["workflows.argoproj.io/workflow-name"], "failed-step-event-") - case "WorkflowRunning": - case "WorkflowNodeFailed": - assert.Equal(t, e.InvolvedObject.Kind, "Pod") - assert.Contains(t, e.Message, "Failed node failed-step-event-") - assert.Equal(t, e.Annotations["workflows.argoproj.io/node-type"], "Pod") - assert.Equal(t, e.Annotations["workflows.argoproj.io/node-name"], nodeName) - assert.Contains(t, e.Annotations["workflows.argoproj.io/workflow-name"], "failed-step-event-") - assert.Equal(t, e.Annotations["workflows.argoproj.io/workflow-uid"], string(uid)) - case "WorkflowFailed": - assert.Contains(t, e.Message, "exit code 1") - default: - assert.Fail(t, e.Reason) - } - } - }, - ). - When(). - // Reset config map to original settings - UpdateConfigMap("workflow-controller-configmap", originalData, map[string]string{}). 
- // Give controller enough time to update from config map change - Wait(5 * time.Second) -} - func (s *FunctionalSuite) TestEventOnNodeFail() { // Test whether an WorkflowFailed event (with appropriate message) is emitted in case of node failure var uid types.UID @@ -419,29 +339,6 @@ func (s *FunctionalSuite) TestEventOnWorkflowSuccess() { ) } -func (s *FunctionalSuite) TestLargeWorkflowFailure() { - var uid types.UID - s.Given(). - Workflow("@expectedfailures/large-workflow.yaml"). - When(). - SubmitWorkflow(). - WaitForWorkflow(120*time.Second). - Then(). - ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { - uid = metadata.UID - }). - ExpectAuditEvents( - fixtures.HasInvolvedObject(workflow.WorkflowKind, uid), - 2, - func(t *testing.T, e []apiv1.Event) { - assert.Equal(t, "WorkflowRunning", e[0].Reason) - - assert.Equal(t, "WorkflowFailed", e[1].Reason) - assert.Contains(t, e[1].Message, "workflow templates are limited to 128KB, this workflow is 128001 bytes") - }, - ) -} - func (s *FunctionalSuite) TestEventOnPVCFail() { // Test whether an WorkflowFailed event (with appropriate message) is emitted in case of error in creating the PVC var uid types.UID @@ -467,7 +364,6 @@ func (s *FunctionalSuite) TestEventOnPVCFail() { } func (s *FunctionalSuite) TestArtifactRepositoryRef() { - s.Need(fixtures.BaseLayerArtifacts) s.Given(). Workflow("@testdata/artifact-repository-ref.yaml"). When(). @@ -615,7 +511,6 @@ spec: } func (s *FunctionalSuite) TestParameterAggregation() { - s.Need(fixtures.BaseLayerArtifacts) s.Given(). Workflow("@functional/param-aggregation.yaml"). When(). @@ -780,7 +675,7 @@ spec: `). When(). SubmitWorkflow(). - WaitForWorkflow(). + WaitForWorkflow(10 * time.Second). Then(). 
ExpectWorkflow(func(t *testing.T, md *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { assert.Equal(t, wfv1.WorkflowFailed, status.Phase) @@ -939,7 +834,6 @@ spec: } func (s *FunctionalSuite) TestOutputArtifactS3BucketCreationEnabled() { - s.Need(fixtures.BaseLayerArtifacts) s.Given(). Workflow("@testdata/output-artifact-with-s3-bucket-creation-enabled.yaml"). When(). diff --git a/test/e2e/hooks_test.go b/test/e2e/hooks_test.go index 41342c44ec71..f613aa1c251e 100644 --- a/test/e2e/hooks_test.go +++ b/test/e2e/hooks_test.go @@ -8,7 +8,6 @@ import ( "testing" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/suite" apiv1 "k8s.io/api/core/v1" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -52,7 +51,6 @@ spec: }).ExpectWorkflowNode(func(status v1alpha1.NodeStatus) bool { return strings.Contains(status.Name, "hook") }, func(t *testing.T, status *v1alpha1.NodeStatus, pod *apiv1.Pod) { - assert.Equal(t, v1alpha1.NodeSucceeded, status.Phase) }) } @@ -71,7 +69,6 @@ spec: - - name: step-1 hooks: exit: - expression: steps["step-1"].status == "Running" template: http success: expression: steps["step-1"].status == "Succeeded" @@ -85,15 +82,15 @@ spec: WaitForWorkflow(fixtures.ToBeSucceeded). Then(). 
ExpectWorkflow(func(t *testing.T, metadata *v1.ObjectMeta, status *v1alpha1.WorkflowStatus) { - assert.Equal(t, status.Phase, v1alpha1.WorkflowSucceeded) + assert.Equal(t, v1alpha1.WorkflowSucceeded, status.Phase) }).ExpectWorkflowNode(func(status v1alpha1.NodeStatus) bool { return strings.Contains(status.Name, "hook") }, func(t *testing.T, status *v1alpha1.NodeStatus, pod *apiv1.Pod) { - assert.Equal(t, v1alpha1.NodeSucceeded, status.Phase) }) } func TestHooksSuite(t *testing.T) { - suite.Run(t, new(HooksSuite)) + // TODO: Tests are temporarily disabled: "https://github.com/argoproj/argo-workflows/issues/9591" + //suite.Run(t, new(HooksSuite)) } diff --git a/test/e2e/http_artifacts_test.go b/test/e2e/http_artifacts_test.go new file mode 100644 index 000000000000..dc63ff8edab8 --- /dev/null +++ b/test/e2e/http_artifacts_test.go @@ -0,0 +1,60 @@ +//go:build functional +// +build functional + +package e2e + +import ( + "testing" + + "github.com/stretchr/testify/suite" + + "github.com/argoproj/argo-workflows/v3/test/e2e/fixtures" +) + +type HttpArtifactsSuite struct { + fixtures.E2ESuite +} + +func (s *HttpArtifactsSuite) TestInputArtifactHttp() { + s.Given(). + Workflow("@testdata/http/input-artifact-http.yaml"). + When(). + SubmitWorkflow(). + WaitForWorkflow(fixtures.ToBeSucceeded) +} + +func (s *HttpArtifactsSuite) TestOutputArtifactHttp() { + s.Given(). + Workflow("@testdata/http/output-artifact-http.yaml"). + When(). + SubmitWorkflow(). + WaitForWorkflow(fixtures.ToBeSucceeded) +} + +func (s *HttpArtifactsSuite) TestBasicAuthArtifactHttp() { + s.Given(). + Workflow("@testdata/http/basic-auth-artifact-http.yaml"). + When(). + SubmitWorkflow(). + WaitForWorkflow(fixtures.ToBeSucceeded) +} + +func (s *HttpArtifactsSuite) TestOAuthArtifactHttp() { + s.Given(). + Workflow("@testdata/http/oauth-artifact-http.yaml"). + When(). + SubmitWorkflow(). 
+ WaitForWorkflow(fixtures.ToBeSucceeded) +} + +func (s *HttpArtifactsSuite) TestClientCertAuthArtifactHttp() { + s.Given(). + Workflow("@testdata/http/clientcert-auth-artifact-http.yaml"). + When(). + SubmitWorkflow(). + WaitForWorkflow(fixtures.ToBeSucceeded) +} + +func TestHttpArtifactsSuite(t *testing.T) { + suite.Run(t, new(HttpArtifactsSuite)) +} diff --git a/test/e2e/http_logger.go b/test/e2e/http_logger.go deleted file mode 100644 index 7b3f99136d54..000000000000 --- a/test/e2e/http_logger.go +++ /dev/null @@ -1,9 +0,0 @@ -package e2e - -import log "github.com/sirupsen/logrus" - -type httpLogger struct{} - -func (d *httpLogger) Logf(fmt string, args ...interface{}) { - log.Debugf(fmt, args...) -} diff --git a/test/e2e/manifests/events/kustomization.yaml b/test/e2e/manifests/events/kustomization.yaml index fe41d7ee5292..eb4dc38183cd 100644 --- a/test/e2e/manifests/events/kustomization.yaml +++ b/test/e2e/manifests/events/kustomization.yaml @@ -11,6 +11,7 @@ patchesStrategicMerge: - ../mixins/workflow-controller-configmap.yaml - ../mixins/workflow-controller-deployment.yaml - ../mixins/cluster-workflow-template-rbac.yaml +- ../mixins/minio-deployment.yaml commonLabels: app.kubernetes.io/part-of: argo diff --git a/test/e2e/manifests/minimal/kustomization.yaml b/test/e2e/manifests/minimal/kustomization.yaml index eeb947c47ad4..e29f4ca34dcc 100644 --- a/test/e2e/manifests/minimal/kustomization.yaml +++ b/test/e2e/manifests/minimal/kustomization.yaml @@ -13,6 +13,7 @@ patchesStrategicMerge: - ../mixins/workflow-controller-configmap.yaml - ../mixins/workflow-controller-deployment.yaml - ../mixins/cluster-workflow-template-rbac.yaml +- ../mixins/minio-deployment.yaml commonLabels: app.kubernetes.io/part-of: argo diff --git a/test/e2e/manifests/mixins/minio-deployment.yaml b/test/e2e/manifests/mixins/minio-deployment.yaml new file mode 100644 index 000000000000..4490d232f43a --- /dev/null +++ b/test/e2e/manifests/mixins/minio-deployment.yaml @@ -0,0 +1,15 @@ 
+apiVersion: apps/v1 +kind: Deployment +metadata: + name: minio +spec: + template: + spec: + containers: + - name: main + lifecycle: + postStart: + exec: + command: ["mkdir", "-p", "/data/my-bucket", "&&", "mkdir", "-p", "/data/my-bucket-2", "&&", "mkdir", "-p", "/data/my-bucket-3"] + + \ No newline at end of file diff --git a/test/e2e/manifests/mixins/workflow-controller-configmap.yaml b/test/e2e/manifests/mixins/workflow-controller-configmap.yaml index 8501ec97f48f..ea61fcbbec92 100644 --- a/test/e2e/manifests/mixins/workflow-controller-configmap.yaml +++ b/test/e2e/manifests/mixins/workflow-controller-configmap.yaml @@ -6,12 +6,10 @@ data: workflowDefaults: | spec: activeDeadlineSeconds: 300 - ttlStrategy: - secondsAfterCompletion: 600 podSpecPatch: | terminationGracePeriodSeconds: 3 executor: | - imagePullPolicy: Never + imagePullPolicy: IfNotPresent resources: requests: cpu: 0.1 @@ -23,4 +21,3 @@ data: completed: 10 failed: 2 errored: 2 - kubeletInsecure: "true" diff --git a/test/e2e/manifests/mysql/kustomization.yaml b/test/e2e/manifests/mysql/kustomization.yaml index d4111a0e0ce0..032ae2248a3f 100644 --- a/test/e2e/manifests/mysql/kustomization.yaml +++ b/test/e2e/manifests/mysql/kustomization.yaml @@ -12,6 +12,7 @@ patchesStrategicMerge: - ../mixins/workflow-controller-configmap.yaml - ../mixins/workflow-controller-deployment.yaml - ../mixins/cluster-workflow-template-rbac.yaml + - ../mixins/minio-deployment.yaml commonLabels: app.kubernetes.io/part-of: argo diff --git a/test/e2e/manifests/postgres/kustomization.yaml b/test/e2e/manifests/postgres/kustomization.yaml index 9ee763062f00..40d88ec8ada3 100644 --- a/test/e2e/manifests/postgres/kustomization.yaml +++ b/test/e2e/manifests/postgres/kustomization.yaml @@ -9,6 +9,7 @@ patchesStrategicMerge: - ../mixins/workflow-controller-configmap.yaml - ../mixins/workflow-controller-deployment.yaml - ../mixins/cluster-workflow-template-rbac.yaml +- ../mixins/minio-deployment.yaml commonLabels: 
app.kubernetes.io/part-of: argo diff --git a/test/e2e/manifests/prometheus/kustomization.yaml b/test/e2e/manifests/prometheus/kustomization.yaml index 3738b023c2a3..4f103ed52e66 100644 --- a/test/e2e/manifests/prometheus/kustomization.yaml +++ b/test/e2e/manifests/prometheus/kustomization.yaml @@ -10,6 +10,7 @@ patchesStrategicMerge: - ../mixins/workflow-controller-configmap.yaml - ../mixins/workflow-controller-deployment.yaml - ../mixins/cluster-workflow-template-rbac.yaml + - ../mixins/minio-deployment.yaml commonLabels: "app.kubernetes.io/part-of": "argo" diff --git a/test/e2e/manifests/sso/kustomization.yaml b/test/e2e/manifests/sso/kustomization.yaml index 8a201a739ab8..7fa4e2c07f38 100644 --- a/test/e2e/manifests/sso/kustomization.yaml +++ b/test/e2e/manifests/sso/kustomization.yaml @@ -9,6 +9,7 @@ patchesStrategicMerge: - ../mixins/workflow-controller-configmap.yaml - ../mixins/workflow-controller-deployment.yaml - ../mixins/cluster-workflow-template-rbac.yaml +- ../mixins/minio-deployment.yaml commonLabels: app.kubernetes.io/part-of: argo diff --git a/test/e2e/metrics_test.go b/test/e2e/metrics_test.go index e69628796267..616e72a6fb30 100644 --- a/test/e2e/metrics_test.go +++ b/test/e2e/metrics_test.go @@ -30,7 +30,7 @@ func (s *MetricsSuite) e(t *testing.T) *httpexpect.Expect { BaseURL: baseUrlMetrics, Reporter: httpexpect.NewRequireReporter(t), Printers: []httpexpect.Printer{ - httpexpect.NewDebugPrinter(&httpLogger{}, true), + httpexpect.NewDebugPrinter(s.T(), true), }, Client: httpClient, }) diff --git a/test/e2e/pod_cleanup_test.go b/test/e2e/pod_cleanup_test.go index 0813a7bd95d2..ac1bcbd02bcc 100644 --- a/test/e2e/pod_cleanup_test.go +++ b/test/e2e/pod_cleanup_test.go @@ -367,6 +367,26 @@ spec: WaitForPod(fixtures.PodDeleted) } +func (s *PodCleanupSuite) TestOnWorkflowTemplate() { + s.Given(). 
+ WorkflowTemplate(` +metadata: + name: test-pod-cleanup +spec: + podGC: + strategy: OnWorkflowCompletion + entrypoint: main + templates: + - name: main + container: + image: argoproj/argosay:v2 +`). + When(). + CreateWorkflowTemplates(). + SubmitWorkflowsFromWorkflowTemplates(). + WaitForPod(fixtures.PodDeleted) +} + func TestPodCleanupSuite(t *testing.T) { suite.Run(t, new(PodCleanupSuite)) } diff --git a/test/e2e/progress_test.go b/test/e2e/progress_test.go index d6b4a161e6ba..6ff006fdde5a 100644 --- a/test/e2e/progress_test.go +++ b/test/e2e/progress_test.go @@ -6,6 +6,7 @@ package e2e import ( "fmt" "testing" + "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" @@ -33,6 +34,7 @@ func (s *ProgressSuite) TestDefaultProgress() { } func (s *ProgressSuite) TestLoggedProgress() { + s.T().SkipNow() toHaveProgress := func(p wfv1.Progress) fixtures.Condition { return func(wf *wfv1.Workflow) (bool, string) { return wf.Status.Nodes[wf.Name].Progress == p && @@ -44,7 +46,7 @@ func (s *ProgressSuite) TestLoggedProgress() { Workflow("@testdata/progress-workflow.yaml"). When(). SubmitWorkflow(). - WaitForWorkflow(toHaveProgress("50/100")). + WaitForWorkflow(toHaveProgress("50/100"), time.Minute). // ARGO_PROGRESS_PATCH_TICK_DURATION=1m WaitForWorkflow(). Then(). ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { diff --git a/test/e2e/resource_template_test.go b/test/e2e/resource_template_test.go index a77db6b27b01..16df428ffa32 100644 --- a/test/e2e/resource_template_test.go +++ b/test/e2e/resource_template_test.go @@ -96,6 +96,53 @@ spec: }) } +func (s *ResourceTemplateSuite) TestResourceTemplateWithArtifact() { + s.Given(). 
+ Workflow(` +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: k8s-resource-tmpl-with-artifact- +spec: + serviceAccount: argo + entrypoint: main + templates: + - name: main + serviceAccountName: argo + inputs: + artifacts: + - name: manifest + path: /tmp/manifestfrom-path.yaml + raw: + data: | + apiVersion: v1 + kind: Pod + metadata: + generateName: k8s-pod-resource- + spec: + serviceAccountName: argo + containers: + - name: argosay-container + image: argoproj/argosay:v2 + command: ["/argosay"] + restartPolicy: Never + resource: + action: create + successCondition: status.phase == Succeeded + failureCondition: status.phase == Failed + manifestFrom: + artifact: + name: manifest +`). + When(). + SubmitWorkflow(). + WaitForWorkflow(). + Then(). + ExpectWorkflow(func(t *testing.T, _ *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { + assert.Equal(t, wfv1.WorkflowSucceeded, status.Phase) + }) +} + func TestResourceTemplateSuite(t *testing.T) { suite.Run(t, new(ResourceTemplateSuite)) } diff --git a/test/e2e/retry_test.go b/test/e2e/retry_test.go new file mode 100644 index 000000000000..5806965bf3d5 --- /dev/null +++ b/test/e2e/retry_test.go @@ -0,0 +1,109 @@ +//go:build functional +// +build functional + +package e2e + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + + wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + "github.com/argoproj/argo-workflows/v3/test/e2e/fixtures" +) + +type RetryTestSuite struct { + fixtures.E2ESuite +} + +func (s *RetryTestSuite) TestRetryLimit() { + s.Given(). + Workflow(` +metadata: + generateName: test-retry-limit- +spec: + entrypoint: main + templates: + - name: main + retryStrategy: + limit: 0 + backoff: + duration: 2s + factor: 2 + maxDuration: 5m + container: + name: main + image: 'argoproj/argosay:v2' + args: [ exit, "1" ] +`). + When(). + SubmitWorkflow(). 
+ WaitForWorkflow(fixtures.ToBeFailed). + Then(). + ExpectWorkflow(func(t *testing.T, _ *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { + assert.Equal(t, wfv1.WorkflowPhase("Failed"), status.Phase) + assert.Equal(t, "No more retries left", status.Message) + }) +} + +func (s *RetryTestSuite) TestRetryBackoff() { + s.Given(). + Workflow(` +metadata: + generateName: test-backoff-strategy- +spec: + entrypoint: main + templates: + - name: main + retryStrategy: + limit: '10' + backoff: + duration: 10s + maxDuration: 1m + container: + name: main + image: 'argoproj/argosay:v2' + args: [ exit, "1" ] +`). + When(). + SubmitWorkflow(). + WaitForWorkflow(time.Minute). + Then(). + ExpectWorkflow(func(t *testing.T, _ *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { + assert.Equal(t, wfv1.WorkflowPhase("Failed"), status.Phase) + assert.LessOrEqual(t, len(status.Nodes), 10) + }) + s.Given(). + Workflow(` +metadata: + generateName: test-backoff-strategy- +spec: + entrypoint: main + templates: + - name: main + retryStrategy: + limit: 10 + backoff: + duration: 10s + maxDuration: 1m + container: + name: main + image: 'argoproj/argosay:v2' + args: [ exit, "1" ] +`). + When(). + SubmitWorkflow(). + WaitForWorkflow(time.Minute). + Then(). + ExpectWorkflow(func(t *testing.T, _ *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { + assert.Equal(t, wfv1.WorkflowPhase("Failed"), status.Phase) + assert.LessOrEqual(t, len(status.Nodes), 10) + }) +} + +func TestRetrySuite(t *testing.T) { + suite.Run(t, new(RetryTestSuite)) +} diff --git a/test/e2e/run_as_not_root_test.go b/test/e2e/run_as_not_root_test.go index 94154c3d2615..10a60d0f3893 100644 --- a/test/e2e/run_as_not_root_test.go +++ b/test/e2e/run_as_not_root_test.go @@ -16,7 +16,6 @@ type RunAsNonRootSuite struct { } func (s *RunAsNonRootSuite) TestRunAsNonRootWorkflow() { - s.Need(fixtures.None(fixtures.Docker)) s.Given(). Workflow("@smoke/runasnonroot-workflow.yaml"). When(). 
@@ -25,7 +24,6 @@ func (s *RunAsNonRootSuite) TestRunAsNonRootWorkflow() { } func (s *RunAsNonRootSuite) TestRunAsNonRootWithOutputParams() { - s.Need(fixtures.None(fixtures.Docker, fixtures.Kubelet)) s.Given(). Workflow("@smoke/runasnonroot-output-params-pipeline.yaml"). When(). diff --git a/test/e2e/signals_test.go b/test/e2e/signals_test.go index c5968ee9e5ec..2e116f12362a 100644 --- a/test/e2e/signals_test.go +++ b/test/e2e/signals_test.go @@ -24,12 +24,6 @@ type SignalsSuite struct { fixtures.E2ESuite } -func (s *SignalsSuite) SetupSuite() { - s.E2ESuite.SetupSuite() - // Because k8ssapi and kubelet execute `sh -c 'kill 15 1'` to they do not work. - s.Need(fixtures.None(fixtures.Kubelet)) -} - func (s *SignalsSuite) TestStopBehavior() { s.Given(). Workflow("@functional/stop-terminate.yaml"). @@ -56,6 +50,24 @@ func (s *SignalsSuite) TestStopBehavior() { }) } +func (s *SignalsSuite) TestStopBehaviorWithDaemon() { + s.Given(). + Workflow("@functional/stop-terminate-daemon.yaml"). + When(). + SubmitWorkflow(). + WaitForWorkflow(fixtures.ToHaveRunningPod, kill2xDuration). + ShutdownWorkflow(wfv1.ShutdownStrategyStop). + WaitForWorkflow(kill2xDuration). + Then(). + ExpectWorkflow(func(t *testing.T, m *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { + assert.Contains(t, []wfv1.WorkflowPhase{wfv1.WorkflowFailed, wfv1.WorkflowError}, status.Phase) + nodeStatus := status.Nodes.FindByDisplayName("Daemon") + if assert.NotNil(t, nodeStatus) { + assert.Equal(t, wfv1.NodeSucceeded, nodeStatus.Phase) + } + }) +} + func (s *SignalsSuite) TestTerminateBehavior() { s.Given(). Workflow("@functional/stop-terminate.yaml"). @@ -80,7 +92,6 @@ func (s *SignalsSuite) TestTerminateBehavior() { // Tests that new pods are never created once a stop shutdown strategy has been added func (s *SignalsSuite) TestDoNotCreatePodsUnderStopBehavior() { - s.Need(fixtures.None(fixtures.Docker)) s.Given(). Workflow("@functional/stop-terminate-2.yaml"). When(). 
@@ -110,7 +121,6 @@ func (s *SignalsSuite) TestSidecars() { // make sure Istio/Anthos and other sidecar injectors will work func (s *SignalsSuite) TestInjectedSidecar() { - s.Need(fixtures.None(fixtures.Emissary)) // emissary cannot kill this s.Given(). Workflow("@testdata/sidecar-injected-workflow.yaml"). When(). @@ -118,12 +128,25 @@ func (s *SignalsSuite) TestInjectedSidecar() { WaitForWorkflow(fixtures.ToBeSucceeded, kill2xDuration) } -func (s *SignalsSuite) TestInjectedSidecarKillAnnotation() { +func (s *SignalsSuite) TestSubProcess() { s.Given(). - Workflow("@testdata/sidecar-injected-kill-annotation-workflow.yaml"). + Workflow("@testdata/subprocess-workflow.yaml"). When(). SubmitWorkflow(). - WaitForWorkflow(fixtures.ToBeSucceeded, kill2xDuration) + WaitForWorkflow() +} + +func (s *SignalsSuite) TestSignaled() { + s.Given(). + Workflow("@testdata/signaled-workflow.yaml"). + When(). + SubmitWorkflow(). + WaitForWorkflow(). + Then(). + ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { + assert.Equal(t, wfv1.WorkflowFailed, status.Phase) + assert.Equal(t, "Error (exit code 143)", status.Message) + }) } func TestSignalsSuite(t *testing.T) { diff --git a/test/e2e/testdata/artifact-workflow-azure.yaml b/test/e2e/testdata/artifact-workflow-azure.yaml new file mode 100644 index 000000000000..07925bb937ef --- /dev/null +++ b/test/e2e/testdata/artifact-workflow-azure.yaml @@ -0,0 +1,36 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: artifact-azure- +spec: + entrypoint: main + artifactRepositoryRef: + configMap: azure-artifact-repositories + key: azure-v1 + templates: + - name: main + container: + image: argoproj/argosay:v2 + command: + - sh + - -c + args: + - | + mkdir -p /out/subdirectory + echo ":) Hello Argo!" > /out/subdirectory/sub-file-1 + echo ":) Hello Argo!" > /out/subdirectory/sub-file-2 + echo ":) Hello Argo!" 
> /tmp/main-file + outputs: + artifacts: + - name: out + path: /out + azure: + blob: out + archive: + none: { } + - name: main-file + path: /tmp/main-file + azure: + blob: main-file + archive: + none: { } diff --git a/test/e2e/testdata/artifact-workflow.yaml b/test/e2e/testdata/artifact-workflow.yaml index 62abd0fe30a8..11f47c8b6175 100644 --- a/test/e2e/testdata/artifact-workflow.yaml +++ b/test/e2e/testdata/artifact-workflow.yaml @@ -8,13 +8,26 @@ spec: - name: main container: image: argoproj/argosay:v2 - args: - - echo - - ":) Hello Argo!" - - /tmp/main-file + command: + - sh + - -c + args: + - | + mkdir -p /out/subdirectory + echo ":) Hello Argo!" > /out/subdirectory/sub-file-1 + echo ":) Hello Argo!" > /out/subdirectory/sub-file-2 + echo ":) Hello Argo!" > /tmp/main-file outputs: artifacts: + - name: out + path: /out + s3: + key: out + archive: + none: { } - name: main-file path: /tmp/main-file + s3: + key: main-file archive: none: { } diff --git a/test/e2e/testdata/artifactgc/artgc-from-template.yaml b/test/e2e/testdata/artifactgc/artgc-from-template.yaml new file mode 100644 index 000000000000..3add711a0dcf --- /dev/null +++ b/test/e2e/testdata/artifactgc/artgc-from-template.yaml @@ -0,0 +1,8 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: artgc-from-template- +spec: + workflowTemplateRef: + name: artgc-template + clusterScope: false \ No newline at end of file diff --git a/test/e2e/testdata/artifactgc/artgc-multi-strategy-multi-anno.yaml b/test/e2e/testdata/artifactgc/artgc-multi-strategy-multi-anno.yaml new file mode 100644 index 000000000000..f4673bbf9669 --- /dev/null +++ b/test/e2e/testdata/artifactgc/artgc-multi-strategy-multi-anno.yaml @@ -0,0 +1,123 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: two-artgc- + #finalizers: + # - "blah" +spec: + entrypoint: entrypoint + artifactGC: + strategy: OnWorkflowCompletion + podGC: + strategy: "" + templates: + - name: entrypoint + steps: + - - 
name: call-first + template: first + - - name: call-second + template: second + - name: first + container: + image: argoproj/argosay:v2 + command: + - sh + - -c + args: + - | + echo "hello world" > /tmp/message + outputs: + artifacts: + - name: first-on-completion-1 + path: /tmp/message + s3: + key: first-on-completion-1 + bucket: my-bucket-2 + endpoint: minio:9000 + insecure: true + accessKeySecret: + name: my-minio-cred + key: accesskey + secretKeySecret: + name: my-minio-cred + key: secretkey + artifactGC: + strategy: OnWorkflowCompletion + serviceAccountName: default + podMetadata: + annotations: + annotation-key-1: annotation-value-1 + annotation-key-2: annotation-value-2 + - name: first-on-completion-2 + path: /tmp/message + s3: + key: first-on-completion-2 + bucket: my-bucket-3 + endpoint: minio:9000 + insecure: true + accessKeySecret: + name: my-minio-cred + key: accesskey + secretKeySecret: + name: my-minio-cred + key: secretkey + - name: first-no-deletion + path: /tmp/message + s3: + key: first-no-deletion + bucket: my-bucket-3 + endpoint: minio:9000 + insecure: true + accessKeySecret: + name: my-minio-cred + key: accesskey + secretKeySecret: + name: my-minio-cred + key: secretkey + artifactGC: + strategy: Never + + - name: second + archiveLocation: + s3: + key: default-to-be-overridden + bucket: my-bucket-3 + endpoint: minio:9000 + insecure: true + accessKeySecret: + name: my-minio-cred + key: accesskey + secretKeySecret: + name: my-minio-cred + key: secretkey + container: + image: argoproj/argosay:v2 + command: + - sh + - -c + args: + - | + echo "hello world" > /tmp/message + outputs: + artifacts: + - name: second-on-deletion + s3: + key: second-on-deletion + path: /tmp/message + artifactGC: + strategy: OnWorkflowDeletion + - name: second-on-completion + path: /tmp/message + s3: + key: second-on-completion + bucket: my-bucket-2 + endpoint: minio:9000 + insecure: true + accessKeySecret: + name: my-minio-cred + key: accesskey + secretKeySecret: + name: 
my-minio-cred + key: secretkey + artifactGC: + strategy: OnWorkflowCompletion \ No newline at end of file diff --git a/test/e2e/testdata/artifactgc/artgc-step-wf-tmpl.yaml b/test/e2e/testdata/artifactgc/artgc-step-wf-tmpl.yaml new file mode 100644 index 000000000000..f7968cecaefa --- /dev/null +++ b/test/e2e/testdata/artifactgc/artgc-step-wf-tmpl.yaml @@ -0,0 +1,15 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: artgc-step-wf-tmpl- +spec: + entrypoint: artgc-step-wf-tmpl + templates: + - name: artgc-step-wf-tmpl + steps: + - - name: call-template + templateRef: + name: artgc-template + template: artgc-template + clusterScope: false + \ No newline at end of file diff --git a/test/e2e/testdata/artifactgc/artgc-template.yaml b/test/e2e/testdata/artifactgc/artgc-template.yaml new file mode 100644 index 000000000000..e109de210c0c --- /dev/null +++ b/test/e2e/testdata/artifactgc/artgc-template.yaml @@ -0,0 +1,52 @@ +apiVersion: argoproj.io/v1alpha1 +kind: WorkflowTemplate +metadata: + name: artgc-template +spec: + workflowMetadata: + labels: + workflows.argoproj.io/test: "true" + entrypoint: artgc-template + templates: + - name: artgc-template + container: + image: argoproj/argosay:v2 + command: + - sh + - -c + args: + - | + echo "hello world" + echo "hello world" > /tmp/message + outputs: + artifacts: + - name: on-completion + path: /tmp/message + s3: + key: on-completion + bucket: my-bucket-2 + endpoint: minio:9000 + insecure: true + accessKeySecret: + name: my-minio-cred + key: accesskey + secretKeySecret: + name: my-minio-cred + key: secretkey + artifactGC: + strategy: OnWorkflowCompletion + - name: on-deletion + path: /tmp/message + s3: + key: on-deletion + bucket: my-bucket-2 + endpoint: minio:9000 + insecure: true + accessKeySecret: + name: my-minio-cred + key: accesskey + secretKeySecret: + name: my-minio-cred + key: secretkey + artifactGC: + strategy: OnWorkflowDeletion \ No newline at end of file diff --git 
a/test/e2e/testdata/basic-artifact-workflow-azure.yaml b/test/e2e/testdata/basic-artifact-workflow-azure.yaml new file mode 100644 index 000000000000..48584f410bd8 --- /dev/null +++ b/test/e2e/testdata/basic-artifact-workflow-azure.yaml @@ -0,0 +1,41 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: basic-artifact-azure- +spec: + entrypoint: main + artifactRepositoryRef: + configMap: azure-artifact-repositories + key: azure-v1 + templates: + - name: main + dag: + tasks: + - name: hello + template: hello + - name: bye + template: bye + dependencies: + - hello + - name: hello + container: + image: argoproj/argosay:v2 + args: + - echo + - hello world + - /tmp/hello_world.txt + outputs: + artifacts: + - name: hello_world + path: /tmp/hello_world.txt + - name: bye + container: + image: argoproj/argosay:v2 + args: + - echo + - bye world + - /tmp/bye_world.txt + outputs: + artifacts: + - name: bye_world + path: /tmp/bye_world.txt diff --git a/test/e2e/testdata/basic-artifact-workflow.yaml b/test/e2e/testdata/basic-artifact-workflow.yaml new file mode 100644 index 000000000000..e5490ee8f4fa --- /dev/null +++ b/test/e2e/testdata/basic-artifact-workflow.yaml @@ -0,0 +1,38 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: basic-artifact- +spec: + entrypoint: main + templates: + - name: main + dag: + tasks: + - name: hello + template: hello + - name: bye + template: bye + dependencies: + - hello + - name: hello + container: + image: argoproj/argosay:v2 + args: + - echo + - hello world + - /tmp/hello_world.txt + outputs: + artifacts: + - name: hello_world + path: /tmp/hello_world.txt + - name: bye + container: + image: argoproj/argosay:v2 + args: + - echo + - bye world + - /tmp/bye_world.txt + outputs: + artifacts: + - name: bye_world + path: /tmp/bye_world.txt \ No newline at end of file diff --git a/test/e2e/testdata/http/basic-auth-artifact-http.yaml b/test/e2e/testdata/http/basic-auth-artifact-http.yaml new file 
mode 100644 index 000000000000..1578e39a8995 --- /dev/null +++ b/test/e2e/testdata/http/basic-auth-artifact-http.yaml @@ -0,0 +1,26 @@ +# This example demonstrates bucket-creation enabled for s3 output artifacts, when the target bucket does not exist. +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: basic-auth-artifact-http- +spec: + entrypoint: main + templates: + - name: main + inputs: + artifacts: + - name: my-art + path: /my-artifact + http: + url: http://httpbin:9100/basic-auth/admin/password + auth: + basicAuth: + usernameSecret: + name: my-httpbin-cred + key: user + passwordSecret: + name: my-httpbin-cred + key: pass + container: + image: argoproj/argosay:v2 + command: [cat, "/my-artifact"] diff --git a/test/e2e/testdata/http/clientcert-auth-artifact-http.yaml b/test/e2e/testdata/http/clientcert-auth-artifact-http.yaml new file mode 100644 index 000000000000..4dc1fb5a7fa3 --- /dev/null +++ b/test/e2e/testdata/http/clientcert-auth-artifact-http.yaml @@ -0,0 +1,26 @@ +# This example demonstrates bucket-creation enabled for s3 output artifacts, when the target bucket does not exist. +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: basic-auth-artifact-http- +spec: + entrypoint: main + templates: + - name: main + inputs: + artifacts: + - name: my-art + path: /my-artifact + http: + url: http://httpbin:9100/get + auth: + clientCert: + clientCertSecret: + name: my-httpbin-cred + key: cert.pem + clientKeySecret: + name: my-httpbin-cred + key: key.pem + container: + image: argoproj/argosay:v2 + command: [cat, "/my-artifact"] diff --git a/test/e2e/testdata/http/input-artifact-http.yaml b/test/e2e/testdata/http/input-artifact-http.yaml new file mode 100644 index 000000000000..71c1b47416d5 --- /dev/null +++ b/test/e2e/testdata/http/input-artifact-http.yaml @@ -0,0 +1,18 @@ +# This example demonstrates bucket-creation enabled for s3 output artifacts, when the target bucket does not exist. 
+apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: input-artifact-http- +spec: + entrypoint: main + templates: + - name: main + inputs: + artifacts: + - name: my-art + path: /my-artifact + http: + url: http://httpbin:9100/get + container: + image: argoproj/argosay:v2 + command: [cat, "/my-artifact"] diff --git a/test/e2e/testdata/http/oauth-artifact-http.yaml b/test/e2e/testdata/http/oauth-artifact-http.yaml new file mode 100644 index 000000000000..b6d6ac368da1 --- /dev/null +++ b/test/e2e/testdata/http/oauth-artifact-http.yaml @@ -0,0 +1,29 @@ +# This example demonstrates bucket-creation enabled for s3 output artifacts, when the target bucket does not exist. +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: basic-auth-artifact-http- +spec: + entrypoint: main + templates: + - name: main + inputs: + artifacts: + - name: my-art + path: /my-artifact + http: + url: http://httpbin:9100/bearer + auth: + oauth2: + clientIDSecret: + name: my-httpbin-cred + key: clientID + clientSecretSecret: + name: my-httpbin-cred + key: clientSecret + tokenURLSecret: + name: my-httpbin-cred + key: tokenURL + container: + image: argoproj/argosay:v2 + command: [cat, "/my-artifact"] diff --git a/test/e2e/testdata/http/output-artifact-http.yaml b/test/e2e/testdata/http/output-artifact-http.yaml new file mode 100644 index 000000000000..29db303aee57 --- /dev/null +++ b/test/e2e/testdata/http/output-artifact-http.yaml @@ -0,0 +1,19 @@ +# This example demonstrates bucket-creation enabled for s3 output artifacts, when the target bucket does not exist. 
+apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: output-artifact-http- +spec: + entrypoint: main + templates: + - name: main + outputs: + artifacts: + - name: my-art + path: /my-artifact + http: + url: http://httpbin:9100/put + container: + image: argoproj/argosay:v2 + command: [sh, -c] + args: ["echo testdata > /my-artifact"] diff --git a/test/e2e/testdata/progress-workflow.yaml b/test/e2e/testdata/progress-workflow.yaml index 7b698067508c..fb893dd2b371 100644 --- a/test/e2e/testdata/progress-workflow.yaml +++ b/test/e2e/testdata/progress-workflow.yaml @@ -17,4 +17,4 @@ spec: image: argoproj/argosay:v2 command: ["/bin/sh", "-c"] args: - - /argosay echo 50/100 $ARGO_PROGRESS_FILE && /argosay sleep 10s + - /argosay echo 50/100 $ARGO_PROGRESS_FILE && /argosay sleep 1m diff --git a/test/e2e/testdata/retry-nested-dag-test.yaml b/test/e2e/testdata/retry-nested-dag-test.yaml new file mode 100644 index 000000000000..4fedf5f22d1f --- /dev/null +++ b/test/e2e/testdata/retry-nested-dag-test.yaml @@ -0,0 +1,48 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + name: retry-nested-dag +spec: + entrypoint: outer-dag + templates: + - name: outer-dag + dag: + tasks: + - name: dag1-step1 + template: node-to-succeed + - name: dag1-step2 + dependencies: [dag1-step1] + template: middle-dag + - name: dag1-step3-tofail + dependencies: [dag1-step2] + template: node-to-fail + + - name: middle-dag + dag: + tasks: + - name: dag2-step1 + template: inner-dag + + - name: inner-dag + dag: + tasks: + - name: dag3-step1 + template: node-to-succeed + - name: dag3-step2 + dependencies: [dag3-step1] + template: node-to-succeed + - name: dag3-step3 + dependencies: [dag3-step2] + template: node-to-succeed + + - name: node-to-succeed + container: + image: argoproj/argosay:v2 + command: [ sh, -c ] + args: [ "exit 0" ] + + - name: node-to-fail + container: + image: argoproj/argosay:v2 + command: [sh, -c] + args: ["exit 1"] diff --git 
a/test/e2e/testdata/sidecar-injected-kill-annotation-workflow.yaml b/test/e2e/testdata/sidecar-injected-kill-annotation-workflow.yaml deleted file mode 100644 index dcf376e29d0d..000000000000 --- a/test/e2e/testdata/sidecar-injected-kill-annotation-workflow.yaml +++ /dev/null @@ -1,25 +0,0 @@ -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: sidecar-injected-kill-annotation- -spec: - entrypoint: main - podMetadata: - annotations: - workflows.argoproj.io/kill-cmd-sidecar: '["sh", "-c", "kill -s%d -- -1"]' - podSpecPatch: | - terminationGracePeriodSeconds: 3 - containers: - - name: wait - - name: main - - name: sidecar - image: argoproj/argosay:v1 - command: - - sh - - -c - args: - - "sleep 999" - templates: - - name: main - container: - image: argoproj/argosay:v1 \ No newline at end of file diff --git a/test/e2e/testdata/signaled-workflow.yaml b/test/e2e/testdata/signaled-workflow.yaml new file mode 100644 index 000000000000..aa212580cd2e --- /dev/null +++ b/test/e2e/testdata/signaled-workflow.yaml @@ -0,0 +1,14 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: signaled- +spec: + entrypoint: main + templates: + - name: main + container: + image: argoproj/argosay:v1 + command: [ bash, -c ] + args: + - | + kill $BASHPID \ No newline at end of file diff --git a/test/e2e/testdata/subprocess-workflow.yaml b/test/e2e/testdata/subprocess-workflow.yaml new file mode 100644 index 000000000000..6301da9e61fb --- /dev/null +++ b/test/e2e/testdata/subprocess-workflow.yaml @@ -0,0 +1,15 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: subprocess- +spec: + entrypoint: main + templates: + - name: main + container: + image: argoproj/argosay:v1 + command: [ sh, -c ] + args: + - | + sleep 60 & + ps -aef \ No newline at end of file diff --git a/test/e2e/testdata/workflow-template-sub-test.yaml b/test/e2e/testdata/workflow-template-sub-test.yaml new file mode 100644 index 000000000000..3c94348a109f 
--- /dev/null +++ b/test/e2e/testdata/workflow-template-sub-test.yaml @@ -0,0 +1,18 @@ +apiVersion: argoproj.io/v1alpha1 +kind: WorkflowTemplate +metadata: + name: workflow-template-submittable +spec: + entrypoint: whalesay-template + templates: + - name: whalesay-template + container: + image: 'argoproj/argosay:v2' + command: + - /argosay + args: + - echo + - '{{workflow.labels.arg-name}}' + workflowMetadata: + labels: + arg-name: myLabelArg diff --git a/test/e2e/workflow_configmap_substitution_test.go b/test/e2e/workflow_configmap_substitution_test.go index 94d59d93b218..e6f652fb398e 100644 --- a/test/e2e/workflow_configmap_substitution_test.go +++ b/test/e2e/workflow_configmap_substitution_test.go @@ -138,6 +138,50 @@ spec: WaitForWorkflow(fixtures.ToBeErrored) } +func (s *WorkflowConfigMapSelectorSubstitutionSuite) TestDefaultParamValueWhenNotFound() { + s.Given(). + Workflow(`apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: workflow-template-configmapkeyselector-wf-default-param- + label: + workflows.argoproj.io/test: "true" +spec: + entrypoint: whalesay + arguments: + parameters: + - name: message + value: msg + templates: + - name: whalesay + inputs: + parameters: + - name: message + valueFrom: + default: "default-val" + configMapKeyRef: + name: cmref-parameters + key: not-existing-key + container: + image: argoproj/argosay:v2 + args: + - echo + - "{{inputs.parameters.message}}" +`). + When(). + CreateConfigMap( + "cmref-parameters", + map[string]string{"msg": "hello world"}, + map[string]string{"workflows.argoproj.io/configmap-type": "Parameter"}). + SubmitWorkflow(). + WaitForWorkflow(fixtures.ToBeSucceeded). + DeleteConfigMap("cmref-parameters"). + Then(). 
+ ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { + assert.Equal(t, wfv1.WorkflowSucceeded, status.Phase) + }) +} + func TestConfigMapKeySelectorSubstitutionSuite(t *testing.T) { suite.Run(t, new(WorkflowConfigMapSelectorSubstitutionSuite)) } diff --git a/test/e2e/workflow_template_test.go b/test/e2e/workflow_template_test.go index 037f999b648f..a5e8bd6c3dc8 100644 --- a/test/e2e/workflow_template_test.go +++ b/test/e2e/workflow_template_test.go @@ -64,6 +64,19 @@ func (s *WorkflowTemplateSuite) TestSubmitWorkflowTemplateWithEnum() { }) } +func (s *WorkflowTemplateSuite) TestSubmitWorkflowTemplateWorkflowMetadataSubstitution() { + s.Given(). + WorkflowTemplate("@testdata/workflow-template-sub-test.yaml"). + When(). + CreateWorkflowTemplates(). + SubmitWorkflowsFromWorkflowTemplates(). + WaitForWorkflow(). + Then(). + ExpectWorkflow(func(t *testing.T, metadata *v1.ObjectMeta, status *v1alpha1.WorkflowStatus) { + assert.Equal(t, status.Phase, v1alpha1.WorkflowSucceeded) + }) +} + func TestWorkflowTemplateSuite(t *testing.T) { suite.Run(t, new(WorkflowTemplateSuite)) } diff --git a/test/util/shared_index_informer.go b/test/util/shared_index_informer.go index 054cc8e93fec..59d0746b8d30 100644 --- a/test/util/shared_index_informer.go +++ b/test/util/shared_index_informer.go @@ -15,6 +15,7 @@ var _ cache.SharedIndexInformer = &SharedIndexInformer{} func NewSharedIndexInformer() *SharedIndexInformer { return &SharedIndexInformer{Indexer: NewIndexer()} } + func (s *SharedIndexInformer) AddEventHandler(cache.ResourceEventHandler) {} func (s *SharedIndexInformer) AddEventHandlerWithResyncPeriod(cache.ResourceEventHandler, time.Duration) { } @@ -26,3 +27,4 @@ func (s *SharedIndexInformer) LastSyncResourceVersion() string func (s *SharedIndexInformer) AddIndexers(cache.Indexers) error { return nil } func (s *SharedIndexInformer) GetIndexer() cache.Indexer { return s.Indexer } func (s *SharedIndexInformer) 
SetWatchErrorHandler(handler cache.WatchErrorHandler) error { return nil } +func (s *SharedIndexInformer) SetTransform(handler cache.TransformFunc) error { return nil } diff --git a/ui/.gitignore b/ui/.gitignore index 5c0dd18e851c..7b9d1bddd171 100644 --- a/ui/.gitignore +++ b/ui/.gitignore @@ -2,4 +2,4 @@ node_modules dist bundle .vscode - +yarn-error.log diff --git a/ui/.npmrc b/ui/.npmrc new file mode 100644 index 000000000000..b6f27f135954 --- /dev/null +++ b/ui/.npmrc @@ -0,0 +1 @@ +engine-strict=true diff --git a/ui/package.json b/ui/package.json index 6575b3744f3a..72601ed84024 100644 --- a/ui/package.json +++ b/ui/package.json @@ -6,48 +6,50 @@ "src" ], "scripts": { - "build": "rm -Rf dist && NODE_ENV=production webpack -p --config ./src/app/webpack.config.js", - "start": "webpack-dev-server --config ./src/app/webpack.config.js", - "lint": "tslint --fix -p ./src/app && ./node_modules/.bin/ts-prune -p src/app/tsconfig.json -i src/models/index.ts", + "build": "rm -Rf dist && NODE_ENV=production webpack --mode production --config ./src/app/webpack.config.js", + "start": "NODE_OPTIONS=--no-experimental-fetch webpack-dev-server --config ./src/app/webpack.config.js", + "lint": "tslint --fix -p ./src/app", "test": "jest" }, + "engines" : { + "node" : ">=16" + }, "dependencies": { "argo-ui": "https://github.com/argoproj/argo-ui.git#v2.5.0", "chart.js": "^2.9.4", "chartjs-plugin-annotation": "^0.5.7", - "classnames": "^2.2.5", - "cron-parser": "^2.16.3", - "cronstrue": "^1.109.0", + "classnames": "^2.3.2", + "cron-parser": "^4.6.0", + "cronstrue": "^2.11.0", "dagre": "^0.8.5", - "formik": "^2.1.2", - "history": "^4.7.2", - "js-yaml": "^3.13.1", + "history": "^4.10.1", + "js-yaml": "^4.1.0", "json-merge-patch": "^0.2.3", - "moment": "^2.24.0", + "moment": "^2.29.4", "monaco-editor": "0.20.0", - "prop-types": "^15.7.2", - "react": "^16.8.3", - "react-chartjs-2": "^2.9.0", - "react-datepicker": "^2.14.0", - "react-dom": "^16.8.3", - "react-moment": "^1.0.0", - 
"react-monaco-editor": "^0.36.0", + "prop-types": "^15.8.1", + "react": "^16.14.0", + "react-chartjs-2": "^2.11.2", + "react-datepicker": "^2.16.0", + "react-dom": "^16.14.0", + "react-moment": "^1.1.1", + "react-monaco-editor": "^0.50.1", "react-router-dom": "^4.2.2", - "superagent": "^3.8.2", + "superagent": "^8.0.0", "superagent-promise": "^1.1.0", - "swagger-ui-react": "^3.29.0" + "swagger-ui-react": "^4.12.0" }, "devDependencies": { - "@babel/core": "^7.0.0-0", - "@babel/preset-env": "^7.12.1", - "@fortawesome/fontawesome-free": "^5.15.3", + "@babel/core": "^7.19.1", + "@babel/preset-env": "^7.19.1", + "@fortawesome/fontawesome-free": "^6.2.0", "@types/chart.js": "^2.9.24", - "@types/classnames": "^2.2.3", - "@types/dagre": "^0.7.44", + "@types/classnames": "^2.3.1", + "@types/dagre": "^0.7.48", "@types/history": "^4.6.2", "@types/jest": "^26.0.15", - "@types/js-yaml": "^3.12.1", - "@types/prop-types": "^15.5.2", + "@types/js-yaml": "^4.0.5", + "@types/prop-types": "^15.7.5", "@types/react": "^16.8.5", "@types/react-autocomplete": "^1.8.5", "@types/react-datepicker": "^2.11.0", @@ -55,41 +57,40 @@ "@types/react-form": "^2.16.1", "@types/react-helmet": "^6.1.0", "@types/react-router-dom": "^4.2.3", - "@types/superagent": "^3.5.7", - "@types/swagger-ui-react": "^3.23.2", - "babel-jest": "^26.6.3", - "babel-loader": "^8.2.2", - "copy-webpack-plugin": "^4.3.1", - "copyfiles": "^1.2.0", - "file-loader": "^6.0.0", - "glob": "^7.1.2", - "html-webpack-plugin": "^3.2.0", + "@types/superagent": "^4.1.15", + "@types/swagger-ui-react": "^4.11.0", + "babel-jest": "^29.0.3", + "babel-loader": "^8.2.5", + "copy-webpack-plugin": "^5.1.2", + "copyfiles": "^2.4.1", + "file-loader": "^6.2.0", + "glob": "^8.0.3", + "html-webpack-plugin": "^4.5.2", "jest": "^26.6.3", - "monaco-editor-webpack-plugin": "^1.9.0", + "monaco-editor-webpack-plugin": "^1.9.1", "prettier": "^1.19.1", "raw-loader": "^0.5.1", - "react-hot-loader": "^3.1.3", - "sass": "^1.30.0", + "react-hot-loader": 
"^4.13.0", + "sass": "^1.54.9", "sass-loader": "^10.1.0", - "source-map-loader": "^0.2.4", + "source-map-loader": "^1.1.3", "style-loader": "^0.20.1", "ts-jest": "^26.4.4", "ts-loader": "^7.0.4", - "ts-node": "^4.1.0", - "ts-prune": "^0.8.4", - "tslint": "^5.9.1", + "ts-node": "^9.1.1", + "tslint": "^5.20.1", "tslint-config-prettier": "^1.18.0", - "tslint-plugin-prettier": "^2.1.0", + "tslint-plugin-prettier": "^2.3.0", "tslint-react": "^3.4.0", - "typescript": "^3.9.2", + "typescript": "^4.6.4", "webfonts-generator": "^0.4.0", - "webpack": "^4.35.0", - "webpack-cli": "^3.3.11", - "webpack-dev-server": "^3.7.2" + "webpack": "^4.46.0", + "webpack-cli": "^4.10.0", + "webpack-dev-server": "^4.11.0" }, "resolutions": { "lodash": "4.17.21", - "prismjs": "1.26.0", + "prismjs": "1.27.0", "@types/react": "16.8.5" } } diff --git a/ui/src/app/apidocs/components/apiDocs.tsx b/ui/src/app/apidocs/components/apiDocs.tsx index 7728f687def8..81f543a3cc71 100644 --- a/ui/src/app/apidocs/components/apiDocs.tsx +++ b/ui/src/app/apidocs/components/apiDocs.tsx @@ -3,11 +3,15 @@ import * as React from 'react'; import SwaggerUI from 'swagger-ui-react'; import 'swagger-ui-react/swagger-ui.css'; import {uiUrl} from '../../shared/base'; +import {useCollectEvent} from '../../shared/components/use-collect-event'; -export const ApiDocs = () => ( - -
    - -
    -
    -); +export const ApiDocs = () => { + useCollectEvent('openedApiDocs'); + return ( + +
    + +
    +
    + ); +}; diff --git a/ui/src/app/app-router.tsx b/ui/src/app/app-router.tsx index 90ecfb36b598..3db7e2d811d0 100644 --- a/ui/src/app/app-router.tsx +++ b/ui/src/app/app-router.tsx @@ -14,7 +14,6 @@ import eventSources from './event-sources'; import help from './help'; import login from './login'; import {ModalSwitch} from './modals/modal-switch'; -import pipelines from './pipelines'; import plugins from './plugins'; import reports from './reports'; import sensors from './sensors'; @@ -31,7 +30,6 @@ import workflows from './workflows'; const eventFlowUrl = uiUrl('event-flow'); const sensorUrl = uiUrl('sensors'); -const pipelinesUrl = uiUrl('pipelines'); const workflowsUrl = uiUrl('workflows'); const workflowsEventBindingsUrl = uiUrl('workflow-event-bindings'); const workflowTemplatesUrl = uiUrl('workflow-templates'); @@ -65,6 +63,10 @@ export const AppRouter = ({popupManager, history, notificationsManager}: {popupM return () => sub.unsubscribe(); }, [popupManager]); useEffect(() => { + services.info.getUserInfo().then(userInfo => { + Utils.userNamespace = userInfo.serviceAccountNamespace; + setNamespace(Utils.currentNamespace); + }); services.info .getInfo() .then(info => { @@ -108,11 +110,6 @@ export const AppRouter = ({popupManager, history, notificationsManager}: {popupM path: cronWorkflowsUrl + namespaceSuffix, iconClassName: 'fa fa-clock' }, - { - title: 'Pipelines', - path: pipelinesUrl + '/' + namespace, - iconClassName: 'fa fa-wind' - }, { title: 'Event Flow', path: eventFlowUrl + namespaceSuffix, @@ -172,7 +169,6 @@ export const AppRouter = ({popupManager, history, notificationsManager}: {popupM - diff --git a/ui/src/app/archived-workflows/components/archived-workflow-details/archived-workflow-details.tsx b/ui/src/app/archived-workflows/components/archived-workflow-details/archived-workflow-details.tsx index f357ecda9f88..c776b845daea 100644 --- a/ui/src/app/archived-workflows/components/archived-workflow-details/archived-workflow-details.tsx +++ 
b/ui/src/app/archived-workflows/components/archived-workflow-details/archived-workflow-details.tsx @@ -2,13 +2,12 @@ import {NotificationType, Page, SlidingPanel} from 'argo-ui'; import * as classNames from 'classnames'; import * as React from 'react'; import {RouteComponentProps} from 'react-router'; -import {execSpec, Link, Workflow} from '../../../../models'; +import {execSpec, Link, NodePhase, Workflow} from '../../../../models'; import {uiUrl} from '../../../shared/base'; import {BasePage} from '../../../shared/components/base-page'; import {ErrorNotice} from '../../../shared/components/error-notice'; import {ProcessURL} from '../../../shared/components/links'; import {Loading} from '../../../shared/components/loading'; -import {ResourceEditor} from '../../../shared/components/resource-editor/resource-editor'; import {services} from '../../../shared/services'; import {WorkflowArtifacts} from '../../../workflows/components/workflow-artifacts'; @@ -25,6 +24,9 @@ import {WorkflowYamlViewer} from '../../../workflows/components/workflow-yaml-vi require('../../../workflows/components/workflow-details/workflow-details.scss'); +const STEP_GRAPH_CONTAINER_MIN_WIDTH = 490; +const STEP_INFO_WIDTH = 570; + interface State { workflow?: Workflow; links?: Link[]; @@ -86,18 +88,28 @@ export class ArchivedWorkflowDetails extends BasePage, ) ) .catch(error => this.setState({error})); + services.info.collectEvent('openedArchivedWorkflowDetails').then(); } public render() { + const workflowPhase: NodePhase = this.state.workflow && this.state.workflow.status ? 
this.state.workflow.status.phase : undefined; const items = [ + { + title: 'Retry', + iconClassName: 'fa fa-undo', + disabled: workflowPhase === undefined || !(workflowPhase === 'Failed' || workflowPhase === 'Error'), + action: () => this.retryArchivedWorkflow() + }, { title: 'Resubmit', - iconClassName: 'fa fa-redo', - action: () => (this.sidePanel = 'resubmit') + iconClassName: 'fa fa-plus-circle', + disabled: false, + action: () => this.resubmitArchivedWorkflow() }, { title: 'Delete', iconClassName: 'fa fa-trash', + disabled: false, action: () => this.deleteArchivedWorkflow() } ]; @@ -108,6 +120,7 @@ export class ArchivedWorkflowDetails extends BasePage, items.push({ title: link.name, iconClassName: 'fa fa-external-link-alt', + disabled: false, action: () => this.openLink(link) }) ); @@ -176,8 +189,8 @@ export class ArchivedWorkflowDetails extends BasePage, ) : ( -
    -
    +
    +
    {this.tab === 'workflow' ? ( , )}
    {this.nodeId && ( -
    +
    @@ -222,25 +235,6 @@ export class ArchivedWorkflowDetails extends BasePage, {this.sidePanel === 'logs' && ( )} - {this.sidePanel === 'resubmit' && ( - - editing={true} - title='Resubmit Archived Workflow' - kind='Workflow' - value={{ - metadata: { - namespace: this.state.workflow.metadata.namespace, - name: this.state.workflow.metadata.name - }, - spec: this.state.workflow.spec - }} - onSubmit={(value: Workflow) => - services.workflows - .create(value, value.metadata.namespace) - .then(workflow => (document.location.href = uiUrl(`workflows/${workflow.metadata.namespace}/${workflow.metadata.name}`))) - } - /> - )} ); @@ -280,6 +274,36 @@ export class ArchivedWorkflowDetails extends BasePage, }); } + private resubmitArchivedWorkflow() { + if (!confirm('Are you sure you want to resubmit this archived workflow?')) { + return; + } + services.archivedWorkflows + .resubmit(this.state.workflow.metadata.uid, this.state.workflow.metadata.namespace) + .then(workflow => (document.location.href = uiUrl(`workflows/${workflow.metadata.namespace}/${workflow.metadata.name}`))) + .catch(e => { + this.appContext.apis.notifications.show({ + content: 'Failed to resubmit archived workflow ' + e, + type: NotificationType.Error + }); + }); + } + + private retryArchivedWorkflow() { + if (!confirm('Are you sure you want to retry this archived workflow?')) { + return; + } + services.archivedWorkflows + .retry(this.state.workflow.metadata.uid, this.state.workflow.metadata.namespace) + .then(workflow => (document.location.href = uiUrl(`workflows/${workflow.metadata.namespace}/${workflow.metadata.name}`))) + .catch(e => { + this.appContext.apis.notifications.show({ + content: 'Failed to retry archived workflow ' + e, + type: NotificationType.Error + }); + }); + } + private openLink(link: Link) { const object = { metadata: { diff --git a/ui/src/app/archived-workflows/components/archived-workflow-filters/archived-workflow-filters.tsx 
b/ui/src/app/archived-workflows/components/archived-workflow-filters/archived-workflow-filters.tsx index 70b333d76487..4258c8ae252e 100644 --- a/ui/src/app/archived-workflows/components/archived-workflow-filters/archived-workflow-filters.tsx +++ b/ui/src/app/archived-workflows/components/archived-workflow-filters/archived-workflow-filters.tsx @@ -194,7 +194,7 @@ export class ArchivedWorkflowFilters extends React.Component { this.setState({ - labels: list.items.sort((a, b) => a.localeCompare(b)) || [] + labels: list.items?.sort((a, b) => a.localeCompare(b)) || [] }); }); } diff --git a/ui/src/app/archived-workflows/components/archived-workflow-list/archived-workflow-list.tsx b/ui/src/app/archived-workflows/components/archived-workflow-list/archived-workflow-list.tsx index 21c9eac45b4f..8ebdeed005b6 100644 --- a/ui/src/app/archived-workflows/components/archived-workflow-list/archived-workflow-list.tsx +++ b/ui/src/app/archived-workflows/components/archived-workflow-list/archived-workflow-list.tsx @@ -1,5 +1,4 @@ import {Page} from 'argo-ui'; - import * as React from 'react'; import {Link, RouteComponentProps} from 'react-router-dom'; import * as models from '../../../../models'; @@ -30,6 +29,7 @@ interface State { maxStartedAt?: Date; workflows?: Workflow[]; error?: Error; + deep: boolean; } const defaultPaginationLimit = 10; @@ -55,7 +55,8 @@ export class ArchivedWorkflowList extends BasePage, Sta selectedPhases: phaseQueryParam.length > 0 ? phaseQueryParam : savedOptions.selectedPhases, selectedLabels: labelQueryParam.length > 0 ? 
labelQueryParam : savedOptions.selectedLabels, minStartedAt: this.parseTime(this.queryParam('minStartedAt')) || this.lastMonth(), - maxStartedAt: this.parseTime(this.queryParam('maxStartedAt')) || this.nextDay() + maxStartedAt: this.parseTime(this.queryParam('maxStartedAt')) || this.nextDay(), + deep: this.queryParam('deep') === 'true' }; } @@ -70,6 +71,15 @@ export class ArchivedWorkflowList extends BasePage, Sta this.state.maxStartedAt, this.state.pagination ); + services.info.collectEvent('openedArchivedWorkflowList').then(); + } + + public componentDidUpdate(): void { + if (this.state.deep === true && this.state.workflows && this.state.workflows.length === 1) { + const workflow = this.state.workflows[0]; + const url = '/archived-workflows/' + workflow.metadata.namespace + '/' + (workflow.metadata.uid || ''); + this.props.history.push(url); + } } public render() { diff --git a/ui/src/app/cluster-workflow-templates/components/cluster-workflow-template-details/cluster-workflow-template-details.tsx b/ui/src/app/cluster-workflow-templates/components/cluster-workflow-template-details/cluster-workflow-template-details.tsx index cced76bdbec8..acb9e110ad69 100644 --- a/ui/src/app/cluster-workflow-templates/components/cluster-workflow-template-details/cluster-workflow-template-details.tsx +++ b/ui/src/app/cluster-workflow-templates/components/cluster-workflow-template-details/cluster-workflow-template-details.tsx @@ -59,6 +59,7 @@ export const ClusterWorkflowTemplateDetails = ({history, location, match}: Route .then(info => setNamespace(Utils.getNamespaceWithDefault(info.managedNamespace))) .then(() => setError(null)) .catch(setError); + services.info.collectEvent('openedClusterWorkflowTemplateDetails').then(); }, []); return ( diff --git a/ui/src/app/cluster-workflow-templates/components/cluster-workflow-template-list/cluster-workflow-template-list.tsx b/ui/src/app/cluster-workflow-templates/components/cluster-workflow-template-list/cluster-workflow-template-list.tsx 
index e4ee4c105239..9980096def72 100644 --- a/ui/src/app/cluster-workflow-templates/components/cluster-workflow-template-list/cluster-workflow-template-list.tsx +++ b/ui/src/app/cluster-workflow-templates/components/cluster-workflow-template-list/cluster-workflow-template-list.tsx @@ -38,6 +38,7 @@ export class ClusterWorkflowTemplateList extends BasePage setEdited(true), [cronWorkflow]); + useCollectEvent('openedCronWorkflowDetails'); + const suspendButton = cronWorkflow && !cronWorkflow.spec.suspend ? { @@ -87,6 +90,100 @@ export const CronWorkflowDetails = ({match, location, history}: RouteComponentPr .catch(setError), disabled: !cronWorkflow || !cronWorkflow.spec.suspend || edited }; + + const openLink = (link: Link) => { + if ((window.event as MouseEvent).ctrlKey || (window.event as MouseEvent).metaKey) { + window.open(link.url, '_blank'); + } else { + document.location.href = link.url; + } + }; + + const getItems = () => { + const items = [ + { + title: 'Submit', + iconClassName: 'fa fa-plus', + disabled: edited, + action: () => + services.workflows + .submit('cronwf', name, namespace) + .then(wf => navigation.goto(uiUrl(`workflows/${wf.metadata.namespace}/${wf.metadata.name}`))) + .then(() => setError(null)) + .catch(setError) + }, + { + title: 'Update', + iconClassName: 'fa fa-save', + disabled: !edited, + action: () => { + // magic - we get the latest from the server and then apply the changes from the rendered version to this + return services.cronWorkflows + .get(name, namespace) + .then(latest => + services.cronWorkflows.update( + { + ...latest, + spec: cronWorkflow.spec, + metadata: {...cronWorkflow.metadata, resourceVersion: latest.metadata.resourceVersion} + }, + cronWorkflow.metadata.name, + cronWorkflow.metadata.namespace + ) + ) + .then(setCronWorkflow) + .then(() => notifications.show({content: 'Updated', type: NotificationType.Success})) + .then(() => setError(null)) + .then(() => setEdited(false)) + .catch(setError); + } + }, + suspendButton, + 
{ + title: 'Delete', + iconClassName: 'fa fa-trash', + disabled: edited, + action: () => { + popup.confirm('confirm', 'Are you sure you want to delete this cron workflow?').then(yes => { + if (yes) { + services.cronWorkflows + .delete(name, namespace) + .then(() => navigation.goto(uiUrl('cron-workflows/' + namespace))) + .then(() => setError(null)) + .catch(setError); + } + }); + } + }, + { + title: 'Share', + iconClassName: 'fa fa-share-alt', + action: () => setSidePanel('share') + } + ]; + + if (cronWorkflow?.spec?.workflowSpec?.workflowTemplateRef) { + const templateName = cronWorkflow.spec.workflowSpec.workflowTemplateRef.name; + const clusterScope = cronWorkflow.spec.workflowSpec.workflowTemplateRef.clusterScope; + const url: string = clusterScope ? `/cluster-workflow-templates/${templateName}` : `/workflow-templates/${cronWorkflow.metadata.namespace}/${templateName}`; + const icon: string = clusterScope ? 'fa fa-window-restore' : 'fa fa-window-maximize'; + + const templateLink: Link = { + name: 'Open Workflow Template', + scope: 'workflow', + url + }; + + items.push({ + title: templateLink.name, + iconClassName: icon, + action: () => openLink(templateLink) + }); + } + + return items; + }; + return ( - services.workflows - .submit('cronwf', name, namespace) - .then(wf => navigation.goto(uiUrl(`workflows/${wf.metadata.namespace}/${wf.metadata.name}`))) - .then(() => setError(null)) - .catch(setError) - }, - { - title: 'Update', - iconClassName: 'fa fa-save', - disabled: !edited, - action: () => { - // magic - we get the latest from the server and then apply the changes from the rendered version to this - return services.cronWorkflows - .get(name, namespace) - .then(latest => - services.cronWorkflows.update( - { - ...latest, - spec: cronWorkflow.spec, - metadata: {...cronWorkflow.metadata, resourceVersion: latest.metadata.resourceVersion} - }, - cronWorkflow.metadata.name, - cronWorkflow.metadata.namespace - ) - ) - .then(setCronWorkflow) - .then(() => 
notifications.show({content: 'Updated', type: NotificationType.Success})) - .then(() => setError(null)) - .then(() => setEdited(false)) - .catch(setError); - } - }, - suspendButton, - { - title: 'Delete', - iconClassName: 'fa fa-trash', - disabled: edited, - action: () => { - popup.confirm('confirm', 'Are you sure you want to delete this cron workflow?').then(yes => { - if (yes) { - services.cronWorkflows - .delete(name, namespace) - .then(() => navigation.goto(uiUrl('cron-workflows/' + namespace))) - .then(() => setError(null)) - .catch(setError); - } - }); - } - }, - { - title: 'Share', - iconClassName: 'fa fa-share-alt', - action: () => setSidePanel('share') - } - ] + items: getItems() } }}> <> diff --git a/ui/src/app/cron-workflows/components/cron-workflow-list/cron-workflow-list.tsx b/ui/src/app/cron-workflows/components/cron-workflow-list/cron-workflow-list.tsx index 804dd74a9e7b..8d4871b3ff15 100644 --- a/ui/src/app/cron-workflows/components/cron-workflow-list/cron-workflow-list.tsx +++ b/ui/src/app/cron-workflows/components/cron-workflow-list/cron-workflow-list.tsx @@ -9,6 +9,7 @@ import {ExampleManifests} from '../../../shared/components/example-manifests'; import {InfoIcon} from '../../../shared/components/fa-icons'; import {Loading} from '../../../shared/components/loading'; import {Timestamp} from '../../../shared/components/timestamp'; +import {useCollectEvent} from '../../../shared/components/use-collect-event'; import {ZeroState} from '../../../shared/components/zero-state'; import {Context} from '../../../shared/context'; import {getNextScheduledTime} from '../../../shared/cron'; @@ -76,6 +77,8 @@ export const CronWorkflowList = ({match, location, history}: RouteComponentProps .catch(setError); }, [namespace, labels, states]); + useCollectEvent('openedCronWorkflowList'); + return ( ro.stop(); }, [namespace, showFlow]); + useCollectEvent('openedEventFlow'); const graph = buildGraph(eventSources, sensors, workflows, flow, expanded); diff --git 
a/ui/src/app/event-sources/components/event-source-details/event-source-details.tsx b/ui/src/app/event-sources/components/event-source-details/event-source-details.tsx index adfe43189022..54f56d3a9b6a 100644 --- a/ui/src/app/event-sources/components/event-source-details/event-source-details.tsx +++ b/ui/src/app/event-sources/components/event-source-details/event-source-details.tsx @@ -8,6 +8,7 @@ import {ID} from '../../../event-flow/components/event-flow-details/id'; import {uiUrl} from '../../../shared/base'; import {ErrorNotice} from '../../../shared/components/error-notice'; import {Loading} from '../../../shared/components/loading'; +import {useCollectEvent} from '../../../shared/components/use-collect-event'; import {Context} from '../../../shared/context'; import {historyUrl} from '../../../shared/history'; import {services} from '../../../shared/services'; @@ -72,6 +73,8 @@ export const EventSourceDetails = ({history, location, match}: RouteComponentPro useEffect(() => setEdited(true), [eventSource]); + useCollectEvent('openedEventSourceDetails'); + return ( ( - -
    -
    -
    -
    -

    Documentation

    - - Online Help - - - API Docs - +export const Help = () => { + useCollectEvent('openedHelp'); + return ( + +
    +
    +
    +
    +

    Documentation

    + + Online Help + + + API Docs + +
    -
    -
    -
    -
    -

    Contact

    - - Slack - +
    +
    +
    +

    Contact

    + + Slack + +
    -
    -
    -
    -
    -

    Argo CLI

    - - Releases - +
    +
    +
    +

    Argo CLI

    + + Releases + +
    -
    - -); + + ); +}; diff --git a/ui/src/app/login/components/login.tsx b/ui/src/app/login/components/login.tsx index 1071ac134692..76eff13101a1 100644 --- a/ui/src/app/login/components/login.tsx +++ b/ui/src/app/login/components/login.tsx @@ -1,12 +1,13 @@ import {Page} from 'argo-ui'; import * as React from 'react'; import {uiUrl, uiUrlWithParams} from '../../shared/base'; +import {useCollectEvent} from '../../shared/components/use-collect-event'; require('./login.scss'); const logout = () => { document.cookie = 'authorization=;Max-Age=0'; - document.location.reload(true); + document.location.reload(); }; const user = (token: string) => { const path = uiUrl(''); @@ -20,57 +21,60 @@ const getRedirect = (): string => { } return 'redirect=' + window.location.origin + '/workflows'; }; -export const Login = () => ( - -
    -
    -

    - Login -

    -

    It may not be necessary to be logged in to use Argo Workflows, it depends on how it is configured.

    -

    - Learn more. -

    -
    - -
    -
    +export const Login = () => { + useCollectEvent('openedLogin'); + return ( + +
    +
    +

    + Login +

    +

    It may not be necessary to be logged in to use Argo Workflows, it depends on how it is configured.

    - If your organisation has configured single sign-on: + Learn more.

    -
    - -
    -
    -

    - If your organisation has configured client authentication, get your token following this instructions from{' '} - here and paste in this box: -

    -
    -