diff --git a/.github/workflows/build.yml b/.github/workflows/ci.yml similarity index 72% rename from .github/workflows/build.yml rename to .github/workflows/ci.yml index 030548c6e9..2a3edfcb2d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/ci.yml @@ -4,6 +4,8 @@ on: push: branches: - "**" + tags: + - 'v[0-9]+.[0-9]+.[0-9]+' pull_request: workflow_dispatch: schedule: @@ -17,14 +19,47 @@ env: BOOST_DIR: 3rdparty/boost BOOST_VERSION: "1.76.0" SKDECIDE_SKIP_DEPS: 1 + MAIN_REPO_NAME: "airbus/scikit-decide" jobs: + trigger: + # store trigger reason + runs-on: ubuntu-latest + outputs: + is_release: ${{ steps.reason.outputs.is_release }} + is_push_on_default_branch: ${{ steps.reason.outputs.is_push_on_default_branch }} + is_schedule: ${{ steps.reason.outputs.is_schedule }} + on_main_repo: ${{ steps.reason.outputs.on_main_repo }} + steps: + - id: reason + run: | + echo "is_release=${{ startsWith(github.ref, 'refs/tags/v') }}" >> $GITHUB_OUTPUT + echo "is_push_on_default_branch=${{ (github.event_name == 'push') && (github.ref == format('refs/heads/{0}', github.event.repository.default_branch)) }}" >> $GITHUB_OUTPUT + echo "is_schedule=${{ github.event_name == 'schedule' }}" >> $GITHUB_OUTPUT + echo "on_main_repo=${{ github.repository == env.MAIN_REPO_NAME }}" >> $GITHUB_OUTPUT + + get-release-version: + needs: trigger + runs-on: ubuntu-latest + outputs: + skdecide-version: ${{ steps.get-version.outputs.skdecide_version }} + tag-name: ${{ steps.get-version.outputs.tag_name }} + steps: + - id: get-version + if: needs.trigger.outputs.is_release == 'true' + run: | + tag_name=${GITHUB_REF/refs\/tags\//} # stripping "refs/tags/" + skdecide_version=${tag_name/v/} # stripping "v" + echo "tag_name=${tag_name}" >> $GITHUB_OUTPUT + echo "skdecide_version=${skdecide_version}" >> $GITHUB_OUTPUT + setup: runs-on: ubuntu-latest + needs: trigger outputs: - python_version: ${{ steps.generate-matrix.outputs.python_version }} - python_version_per_os: ${{ 
steps.generate-matrix.outputs.python_version_per_os }} + python_version_test_per_os: ${{ steps.generate-matrix.outputs.python_version_test_per_os }} + python_version_build_per_os: ${{ steps.generate-matrix.outputs.python_version_build_per_os }} build: ${{ steps.generate-matrix.outputs.build}} test: ${{ steps.generate-matrix.outputs.test}} do_macos: ${{ steps.generate-matrix.outputs.do_macos}} @@ -41,12 +76,13 @@ jobs: run: | from os import environ - python_version = ["3.8", "3.9", "3.10", "3.11"] + python_version_build = ["3.8", "3.9", "3.10", "3.11"] + python_version_test = ["3.8", "3.11"] build = [ "macos-12", "ubuntu-latest", "windows-latest" ] - test = [ "macos-11", "macos-12", "ubuntu-22.04", "ubuntu-20.04", "windows-2019", "windows-2022"] + test = [ "macos-12", "macos-latest", "ubuntu-latest", "windows-latest"] build_doc = "true" - if "${{ github.event_name }}" != "schedule": + if "${{ needs.trigger.outputs.is_release == 'true' || needs.trigger.outputs.is_push_on_default_branch == 'true' || needs.trigger.outputs.is_schedule == 'true' }}" == "false": to_bool = lambda s: True if s == "true" else False python_filter = { '3.11' : to_bool("${{ contains(github.event.head_commit.message, '[ci: python-3.11]') }}"), @@ -55,14 +91,13 @@ jobs: '3.10' : to_bool("${{ contains(github.event.head_commit.message, '[ci: python-3.10]') }}"), } if any(python_filter.values()): - python_version = [v for v in python_version if python_filter[v]] + python_version_build = [v for v in python_version_build if python_filter[v]] + python_version_test = [v for v in python_version_test if python_filter[v]] os_filter = { - 'macos-11' : to_bool("${{ contains(github.event.head_commit.message, '[ci: macos-11]') }}"), + 'macos-latest' : to_bool("${{ contains(github.event.head_commit.message, '[ci: macos-latest]') }}"), 'macos-12' : to_bool("${{ contains(github.event.head_commit.message, '[ci: macos-12]') }}"), - 'ubuntu-22.04' : to_bool("${{ contains(github.event.head_commit.message, '[ci: 
ubuntu-22.04]') }}"), - 'ubuntu-20.04' : to_bool("${{ contains(github.event.head_commit.message, '[ci: ubuntu-20.04]') }}"), - 'windows-2019' : to_bool("${{ contains(github.event.head_commit.message, '[ci: windows-2019]') }}"), - 'windows-2022' : to_bool("${{ contains(github.event.head_commit.message, '[ci: windows-2022]') }}"), + 'ubuntu-latest' : to_bool("${{ contains(github.event.head_commit.message, '[ci: ubuntu-latest]') }}"), + 'windows-latest' : to_bool("${{ contains(github.event.head_commit.message, '[ci: windows-latest]') }}"), } if set(os_filter.keys()) != set(test): raise Exception("test and os_filter do not contain the same keys") @@ -82,12 +117,13 @@ jobs: build_doc = "false" oses = ["macos", "ubuntu", "windows"] build_dict = {os : [k for k in build if k.startswith(os)] for os in oses} - python_version_per_os = {os: python_version for os in oses} + python_version_build_per_os = {os: python_version_build for os in oses} + python_version_test_per_os = {os: python_version_test for os in oses} # remove python 3.11 for windows: dependency conflict from pyarrow prevent testing the wheel windows python 3.11 - python_version_per_os["windows"] = [v for v in python_version if v != "3.11"] + # python_version_test_per_os["windows"] = [v for v in python_version_test if v != "3.11"] # update build_dict by removing os without any python version for os in build_dict: - if len(python_version_per_os[os]) == 0: + if len(python_version_test_per_os[os]) == 0 or len(python_version_build_per_os[os]) == 0: build_dict[os] = [] with open(environ["GITHUB_OUTPUT"], "a") as f: @@ -96,8 +132,8 @@ jobs: f.write(f"build_doc={build_doc}\n") for os in oses: f.write(f"do_{os}={'true' if len(build_dict[os]) > 0 else 'false'}\n") - f.write(f"python_version={python_version}\n") - f.write(f"python_version_per_os={python_version_per_os}\n") + f.write(f"python_version_build_per_os={python_version_build_per_os}\n") + f.write(f"python_version_test_per_os={python_version_test_per_os}\n") 
lint-sources: @@ -123,7 +159,7 @@ jobs: strategy: matrix: os: ${{ fromJSON(needs.setup.outputs.build).windows }} - python-version: ${{ fromJSON(needs.setup.outputs.python_version_per_os).windows }} + python-version: ${{ fromJSON(needs.setup.outputs.python_version_build_per_os).windows }} fail-fast: false defaults: run: @@ -197,13 +233,13 @@ jobs: path: dist/*.whl build-macos: - needs: [setup] + needs: [trigger, setup] if: needs.setup.outputs.do_macos == 'true' strategy: matrix: arch: ["arm64", "x86_64"] # NB: only x86_64 wheel will be tested as no macosx_arm64 github runner available os: ${{ fromJSON(needs.setup.outputs.build).macos }} - python-version: ${{ fromJSON(needs.setup.outputs.python_version_per_os).macos }} + python-version: ${{ fromJSON(needs.setup.outputs.python_version_build_per_os).macos }} fail-fast: false defaults: run: @@ -260,12 +296,14 @@ jobs: run: echo "SKDECIDE_SKIP_DEPS=0" >> $GITHUB_ENV - name: Install and restore ccache + if: needs.trigger.outputs.is_release == 'false' uses: hendrikmuhs/ccache-action@v1.2 with: key: ${{ runner.os }}-py${{ matrix.python-version }}-${{ matrix.arch }} max-size: 80M - name: Let cmake use ccache + if: needs.trigger.outputs.is_release == 'false' run: | echo "CMAKE_CXX_COMPILER_LAUNCHER=ccache" >> ${GITHUB_ENV} echo "CMAKE_C_COMPILER_LAUNCHER=ccache" >> ${GITHUB_ENV} @@ -312,8 +350,8 @@ jobs: # build wheel python -m cibuildwheel --output-dir wheelhouse - # set the proper platform tag - #  - with poetry build + cross-compilation for arm64, the tag could been still x64_64 (https://cibuildwheel.readthedocs.io/en/stable/faq/#how-to-cross-compile) + # set the proper platform tag + # - with poetry build + cross-compilation for arm64, the tag could been still x64_64 (https://cibuildwheel.readthedocs.io/en/stable/faq/#how-to-cross-compile) # - we downgrade the displayed macosx version to ensure compatibility with lesser macosx than the ones used on this runner pip install "wheel>=0.40" wheel tags --platform-tag 
macosx_${MACOSX_DEPLOYMENT_TARGET_WO_DOT}_${ARCH} --remove wheelhouse/*.whl @@ -329,12 +367,12 @@ jobs: path: wheelhouse/*.whl build-ubuntu: - needs: [setup] + needs: [trigger, setup] if: needs.setup.outputs.do_ubuntu == 'true' strategy: matrix: os: ${{ fromJSON(needs.setup.outputs.build).ubuntu }} - python-version: ${{ fromJSON(needs.setup.outputs.python_version_per_os).ubuntu }} + python-version: ${{ fromJSON(needs.setup.outputs.python_version_build_per_os).ubuntu }} fail-fast: false defaults: run: @@ -398,6 +436,7 @@ jobs: key: dev-deps-${{ runner.os }}-${{ hashFiles('scripts/build-skdecide_dev.sh', 'scripts/Dockerfile_x86_64_dev') }} - name: Restore ccache cache + if: needs.trigger.outputs.is_release == 'false' id: ccache-restore uses: actions/cache@v4 with: @@ -415,13 +454,17 @@ jobs: mkdir -p /tmp/docker docker image save -o /tmp/docker/skdecide_dev.tar skdecide_dev fi - # The existence of .ccache directory triggers ccache use in builds-manylinux-wheels.sh - test -d .ccache || mkdir .ccache + if [ "${{ needs.trigger.outputs.is_release }}" == "false" ]; then + # The existence of .ccache directory triggers ccache use in builds-manylinux-wheels.sh + test -d .ccache || mkdir .ccache + fi docker build -f scripts/Dockerfile_x86_64 -t skdecide_x86_64 --build-arg PYTHON_VERSION=${{matrix.python-version}} --build-arg SKDECIDE_SKIP_DEPS=${SKDECIDE_SKIP_DEPS} --build-arg BOOST_DIR=${BOOST_DIR} . 
# Fetch wheels from Docker docker run --rm -v $PWD:/mytmp skdecide_x86_64 cp -r /io/dist /mytmp - # Fetch ccache from Docker - docker run --rm -v $PWD:/mytmp skdecide_x86_64 cp -r /io/.ccache /mytmp + if [ "${{ needs.trigger.outputs.is_release }}" == "false" ]; then + # Fetch ccache from Docker + docker run --rm -v $PWD:/mytmp skdecide_x86_64 cp -r /io/.ccache /mytmp + fi - name: Update build cache from wheels if: steps.cache-build-dependencies.outputs.cache-hit != 'true' @@ -438,9 +481,9 @@ jobs: strategy: matrix: os: ${{ fromJSON(needs.setup.outputs.test).windows }} - python-version: ${{ fromJSON(needs.setup.outputs.python_version_per_os).windows }} + python-version: ${{ fromJSON(needs.setup.outputs.python_version_test_per_os).windows }} compiler: [gnu] - fail-fast: true + fail-fast: false runs-on: ${{ matrix.os }} defaults: run: @@ -498,6 +541,7 @@ jobs: - name: Install scikit-decide and test dependencies run: | + pip install "ray[rllib]>=2.20" python_version=${{ matrix.python-version }} wheelfile=$(ls ./wheels/scikit_decide*-cp${python_version/\./}-*win*.whl) pip install ${wheelfile}[all] pytest gymnasium[classic-control] @@ -525,8 +569,8 @@ jobs: strategy: matrix: os: ${{ fromJSON(needs.setup.outputs.test).macos }} - python-version: ${{ fromJSON(needs.setup.outputs.python_version_per_os).macos }} - fail-fast: true + python-version: ${{ fromJSON(needs.setup.outputs.python_version_test_per_os).macos }} + fail-fast: false runs-on: ${{ matrix.os }} env: minizinc_config_cmdline: export PATH=$PATH:$(pwd)/bin/MiniZincIDE.app/Contents/Resources @@ -538,8 +582,8 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Install libomp package - run: brew install libomp + - name: Install needed brew dependencies + run: brew install libomp eccodes - name: Set up Python uses: actions/setup-python@v5 @@ -582,10 +626,17 @@ jobs: ${{ env.minizinc_config_cmdline }} minizinc --version + - name: Install prerelease version of pymip (only for macos arm64) + if: matrix.os == 'macos-latest' + 
run: | + python -m pip install -U pip + pip install mip==1.16rc0 + - name: Install scikit-decide and test dependencies run: | python_version=${{ matrix.python-version }} - wheelfile=$(ls ./wheels/scikit_decide*-cp${python_version/\./}-*macos*x86_64.whl) + arch=$(uname -m) + wheelfile=$(ls ./wheels/scikit_decide*-cp${python_version/\./}-*macos*${arch}.whl) pip install ${wheelfile}[all] pytest gymnasium[classic-control] - name: Test with pytest @@ -611,8 +662,8 @@ jobs: strategy: matrix: os: ${{ fromJSON(needs.setup.outputs.test).ubuntu }} - python-version: ${{ fromJSON(needs.setup.outputs.python_version_per_os).ubuntu }} - fail-fast: true + python-version: ${{ fromJSON(needs.setup.outputs.python_version_test_per_os).ubuntu }} + fail-fast: false runs-on: ${{ matrix.os }} env: minizinc_config_cmdline: export PATH=$PATH:$(pwd)/bin/squashfs-root/usr/bin; export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pwd)/bin/squashfs-root/usr/lib @@ -696,18 +747,13 @@ jobs: python scripts/md2py.py docs/guide/README.md tests/test_guide.py python tests/test_guide.py - build-doc: - needs: [ build-ubuntu, setup ] - if: needs.setup.outputs.build_doc == 'true' - uses: ./.github/workflows/build-doc.yml - with: - doc-prerequisites-cmdline: export DO_SKIP_MZN_CHECK=1 - - upload-nightly: - if: (github.repository == 'airbus/scikit-decide') && (github.ref == 'refs/heads/master') - needs: [test-ubuntu, test-macos, test-windows] + upload-release: + needs: [ get-release-version, trigger, test-ubuntu, test-macos, test-windows ] + if: | + (needs.trigger.outputs.is_release == 'true') + || ((needs.trigger.outputs.is_push_on_default_branch == 'true') && (needs.trigger.outputs.on_main_repo == 'true')) + || ((needs.trigger.outputs.is_schedule == 'true') && (needs.trigger.outputs.on_main_repo == 'true')) runs-on: ubuntu-latest - steps: - uses: actions/download-artifact@v4 with: @@ -715,10 +761,20 @@ jobs: merge-multiple: true path: dist/ - - run: | - zip -r dist.zip dist/ + - name: Upload artifacts 📦 to release + 
uses: ncipollo/release-action@v1 + if: needs.trigger.outputs.is_release == 'true' + with: + artifacts: dist/*.whl + tag: ${{ needs.get-release-version.outputs.tag-name }} + allowUpdates: true + generateReleaseNotes: true + + - if: needs.trigger.outputs.is_release == 'false' + run: zip -r dist.zip dist/ - uses: actions/github-script@v7 + if: needs.trigger.outputs.is_release == 'false' id: asset with: script: | @@ -848,20 +904,141 @@ jobs: return uploadedAsset.data.browser_download_url; result-encoding: string + update-notebooks-for-colab-and-binder: + runs-on: ubuntu-latest + needs: [ trigger, get-release-version, build-ubuntu ] + if: needs.trigger.outputs.is_release == 'true' + outputs: + notebooks-branch: ${{ steps.write-output.outputs.notebooks_branch }} + binder-full-ref: ${{ steps.write-output.outputs.binder_full_ref }} + steps: + - uses: actions/checkout@v4 + - name: replace scikit-decide version to install in colab notebooks + run: | + version=${{ needs.get-release-version.outputs.skdecide-version }} + old_pip_spec_pattern="\(skdecide_pip_spec.*\)scikit-decide\[all\]" + new_pip_spec_pattern="\1scikit-decide[all]==${version}" + if [ "${{ github.repository != env.MAIN_REPO_NAME && secrets.TEST_PYPI_PASSWORD != '' }}" == "true" ]; then + # install from TestPypi if on a fork + new_pip_spec_pattern="${new_pip_spec_pattern} --extra-index-url https://test.pypi.org/simple/" + fi + old_using_nightly_pattern="\(using_nightly_version\s*=\s*\)True" + new_using_nightly_pattern="using_nightly_version = False" + shopt -s globstar # enable ** + sed -i \ + -e "s|${old_pip_spec_pattern}|${new_pip_spec_pattern}|g" \ + -e "s|${old_using_nightly_pattern}|${new_using_nightly_pattern}|g" \ + notebooks/**/*.ipynb + - name: replace scikit-decide version to install in binder environment + run: | + version=${{ needs.get-release-version.outputs.skdecide-version }} + # environment.yml + linefilter="/^name/!" 
+ old_pip_spec_pattern="\(\s*\)-.*scikit-decide.*$" + new_pip_spec_pattern="\1- scikit-decide[all]==$version" + if [ "${{ github.repository != env.MAIN_REPO_NAME && secrets.TEST_PYPI_PASSWORD != '' }}" == "true" ]; then + # install from TestPypi if on a fork + new_pip_spec_pattern="${new_pip_spec_pattern}\n\1- --extra-index-url https://test.pypi.org/simple/" + fi + sed_command="${linefilter}s|${old_pip_spec_pattern}|${new_pip_spec_pattern}|" + echo sed -i -e ${sed_command} binder/environment.yml + sed -i -e "${sed_command}" binder/environment.yml + # postBuild + old_using_nightly_pattern="using_nightly_version=true" + new_using_nightly_pattern="using_nightly_version=false" + sed_command="s|${old_using_nightly_pattern}|${new_using_nightly_pattern}|" + sed -i -e "${sed_command}" binder/postBuild + - name: push modifications on a dedicated tag + id: push-tuto-release-tag + run: | + current_tag_name=${{ needs.get-release-version.outputs.tag-name }} + new_tag_name="notebooks-${current_tag_name}" + echo ${new_tag_name} + git config user.name "Actions" + git config user.email "actions@github.com" + git commit binder notebooks -m "Install appropriate version of scikit-decide" + git tag ${new_tag_name} -m "Use release ${current_tag_name} in binder and colab" + git push origin ${new_tag_name} + # store new tag name as notebooks branch + echo "notebooks_branch=${new_tag_name}" >> $GITHUB_ENV + echo "binder_full_ref=${{ github.repository }}/${new_tag_name}" >> $GITHUB_ENV + - name: write new notebooks branch in job outputs + id: write-output + run: | + echo "notebooks_branch=${notebooks_branch}" >> $GITHUB_OUTPUT + echo "binder_full_ref=${binder_full_ref}" >> $GITHUB_OUTPUT + + build-doc: + needs: [ build-ubuntu, setup, update-notebooks-for-colab-and-binder ] + # if: always() + # -> trigger even if one needed job was skipped (namely update-notebooks-for-colab-and-binder) + # -> needed jobs successes must be checked explicitely + if: | + always() + && (needs.setup.outputs.build_doc 
== 'true') + && (needs.build-ubuntu.result == 'success') + uses: ./.github/workflows/build-doc.yml + with: + notebooks-branch: ${{ needs.update-notebooks-for-colab-and-binder.outputs.notebooks-branch }} + doc-prerequisites-cmdline: export DO_SKIP_MZN_CHECK=1 + + deploy: + needs: [ trigger, test-ubuntu, test-macos, test-windows ] + if: needs.trigger.outputs.is_release == 'true' + runs-on: ubuntu-latest + steps: + - name: Download artifact + uses: actions/download-artifact@v4 + with: + pattern: wheels-* + merge-multiple: true + path: wheels + + - name: Publish distribution 📦 to PyPI + env: + PYPI_TOKEN: ${{ secrets.PYPI_PASSWORD }} + if: needs.trigger.outputs.on_main_repo == 'true' && env.PYPI_TOKEN != '' + uses: pypa/gh-action-pypi-publish@release/v1 + with: + password: ${{ secrets.PYPI_PASSWORD }} + packages_dir: wheels/ + + - name: Publish distribution 📦 to Test PyPI + env: + TEST_PYPI_TOKEN: ${{ secrets.TEST_PYPI_PASSWORD }} + if: needs.trigger.outputs.on_main_repo == 'false' && env.TEST_PYPI_TOKEN != '' + uses: pypa/gh-action-pypi-publish@release/v1 + with: + password: ${{ secrets.TEST_PYPI_PASSWORD }} + packages_dir: wheels/ + repository_url: https://test.pypi.org/legacy/ + deploy-doc: - needs: [build-doc, test-windows, test-macos, test-ubuntu, upload-nightly] + needs: [trigger, get-release-version, update-notebooks-for-colab-and-binder, build-doc, test-windows, test-macos, test-ubuntu, upload-release, deploy] # if: always() - # -> trigger even if one needed job was skipped (namely upload-nightly) + # -> trigger even if one needed job was skipped (namely upload-release or deploy) # -> needed jobs successes must be checked explicitely if: | always() - && (github.ref == 'refs/heads/master') && (needs.build-doc.result == 'success') && (needs.test-windows.result == 'success') && (needs.test-macos.result == 'success') && (needs.test-ubuntu.result == 'success') && ( - (needs.upload-nightly.result == 'success') - || (github.repository != 'airbus/scikit-decide') + ( + 
(needs.trigger.outputs.is_push_on_default_branch == 'true') + && ( + (needs.upload-release.result == 'success') + || (needs.trigger.outputs.on_main_repo == 'false') + ) + ) + || ( + (needs.trigger.outputs.is_release == 'true') + && (needs.deploy.result == 'success') + ) ) uses: ./.github/workflows/deploy-doc.yml + with: + binder-env-fullref: ${{ needs.update-notebooks-for-colab-and-binder.outputs.binder-full-ref }} + doc-clean: ${{ needs.trigger.outputs.is_release == 'false'}} + doc-version: ${{ needs.get-release-version.outputs.skdecide-version }} diff --git a/.github/workflows/deploy-doc.yml b/.github/workflows/deploy-doc.yml index 9a65d1be55..7eb4fe8e29 100644 --- a/.github/workflows/deploy-doc.yml +++ b/.github/workflows/deploy-doc.yml @@ -3,10 +3,10 @@ name: Deploy doc on: workflow_call: inputs: - doc-version-path: - description: "Path where the doc should be deployed" + doc-version: + description: "version of the library for which we are deploying the doc. If empty, then it is main branch doc." required: FALSE - default: "/" + default: "" type: string doc-clean: description: "Whether the previous doc should be cleaned. (Always ignore subfolders in version/)" @@ -26,18 +26,20 @@ on: binder-env-fullref: description: "Full ref of the binder env to build. The build is triggered only if not empty." 
required: false - default: "${{ github.repository }}/${{ github.ref_name}}" + default: "" type: string jobs: trigger-binder-build: runs-on: ubuntu-latest - if: inputs.binder-env-fullref != '' steps: - uses: actions/checkout@v4 # checkout triggering branch to get scripts/trigger_binder.sh - name: Trigger a build for default binder env ref on each BinderHub deployments in the mybinder.org federation continue-on-error: true run: | binder_env_full_ref=${{ inputs.binder-env-fullref }} + if [ -z "${binder_env_full_ref}" ]; then + binder_env_full_ref="${{ github.repository }}/${{ github.ref_name}}" + fi echo Triggering binder environment build for ${binder_env_full_ref} bash scripts/trigger_binder.sh https://ovh.mybinder.org/build/gh/${binder_env_full_ref} bash scripts/trigger_binder.sh https://ovh2.mybinder.org/build/gh/${binder_env_full_ref} @@ -52,12 +54,23 @@ jobs: with: name: ${{ inputs.doc-artifact-name }} path: ${{ inputs.doc-path }} + - name: set doc-version-path + id: set-doc-version-path + run: | + doc_version=${{ inputs.doc-version }} + if [ -z "${doc_version}" ]; then + doc_version_path="/" + else + doc_version_path="/version/${doc_version}/" + fi + echo "doc_version_path=${doc_version_path}" >> $GITHUB_OUTPUT + - name: Deploy documentation in a version subfolder on GH pages uses: JamesIves/github-pages-deploy-action@v4 with: branch: gh-pages # The branch the action should deploy to. folder: ${{ inputs.doc-path }} # The folder the action should deploy. - target-folder: ${{ inputs.doc-version-path }} # The folder the action should deploy to. + target-folder: ${{ steps.set-doc-version-path.outputs.doc_version_path }} # The folder the action should deploy to. 
commit-message: publish documentation single-commit: true clean: ${{ inputs.doc-clean }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml deleted file mode 100644 index 80238ce42f..0000000000 --- a/.github/workflows/release.yml +++ /dev/null @@ -1,782 +0,0 @@ -name: 🔨 Release scikit-decide - -on: - push: - tags: - - 'v[0-9]+.[0-9]+.[0-9]+' - -env: - BOOST_DIR: 3rdparty/boost - BOOST_VERSION: "1.76.0" - SKDECIDE_SKIP_DEPS: 1 - MAIN_REPO_NAME: "airbus/scikit-decide" - -jobs: - - setup: - runs-on: ubuntu-latest - outputs: - python_version: ${{ steps.generate-matrix.outputs.python_version }} - python_version_per_os: ${{ steps.generate-matrix.outputs.python_version_per_os }} - build: ${{ steps.generate-matrix.outputs.build}} - test: ${{ steps.generate-matrix.outputs.test}} - deploy_test_pypi: ${{ steps.generate-matrix.outputs.deploy_test_pypi}} - steps: - - uses: actions/setup-python@v5 - - name: Generate Matrix - id: generate-matrix - shell: python3 {0} - run: | - from os import environ - - python_version = ["3.8", "3.9", "3.10", "3.11"] - build_dict = { "macos":["macos-12"], "ubuntu":["ubuntu-latest"], "windows":["windows-latest"] } - test_dict = { "macos":["macos-12", "macos-11"], "ubuntu":["ubuntu-22.04", "ubuntu-20.04"], "windows":["windows-2019", "windows-2022" ]} - deploy_test_pypi = "true" - python_version_per_os = {os: python_version for os in build_dict} - # remove python 3.11 for windows: dependency conflict from pyarrow prevent testing the wheel windows python 3.11 python_version_per_os["windows"] = [v for v in python_version if v != "3.11"] - python_version_per_os["windows"] = [v for v in python_version if v != "3.11"] - - if "${{ contains(github.event.head_commit.message, '[ci: skip-deploy-test-pypi]') }}" == "true": - deploy_test_pypi = "false" - - with open(environ["GITHUB_OUTPUT"], "a") as f: - f.write(f"build={build_dict}\n") - f.write(f"test={test_dict}\n") - f.write(f"python_version={python_version}\n") - 
f.write(f"python_version_per_os={python_version_per_os}\n") - f.write(f"deploy_test_pypi={deploy_test_pypi}\n") - - get-release-version: - runs-on: ubuntu-latest - outputs: - skdecide-version: ${{ steps.get-version.outputs.skdecide_version }} - tag-name: ${{ steps.get-version.outputs.tag_name }} - steps: - - id: get-version - run: | - tag_name=${GITHUB_REF/refs\/tags\//} # stripping "refs/tags/" - skdecide_version=${tag_name/v/} # stripping "v" - echo "tag_name=${tag_name}" >> $GITHUB_OUTPUT - echo "skdecide_version=${skdecide_version}" >> $GITHUB_OUTPUT - - lint-sources: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 - with: - python-version: "3.8" - - name: install pre-commit - run: python -m pip install pre-commit - - name: get cached pre-commit hooks - uses: actions/cache@v4 - with: - path: ~/.cache/pre-commit - key: pre-commit|${{ env.pythonLocation }}|${{ hashFiles('.pre-commit-config.yaml') }} - - name: pre-commit checks - run: pre-commit run --show-diff-on-failure --color=always --all-files - - build-windows: - needs: [setup] - strategy: - matrix: - os: ${{ fromJSON(needs.setup.outputs.build).windows }} - python-version: ${{ fromJSON(needs.setup.outputs.python_version_per_os).windows }} - fail-fast: false - defaults: - run: - shell: bash - runs-on: ${{ matrix.os }} - - steps: - - name: Checkout scikit-decide source code - uses: actions/checkout@v4 - with: - submodules: true - fetch-depth: 0 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: Load cached venv - id: cached-pip-wheels - uses: actions/cache@v4 - with: - path: ~/.cache - key: venv-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }} - - - name: Restore Boost cache - uses: actions/cache@v4 - id: cache-boost - with: - path: ${{env.BOOST_DIR}} - key: BOOST_${{env.BOOST_VERSION}} - - - name: Install Boost - if: steps.cache-boost.outputs.cache-hit != 
'true' - run: | - mkdir -p $BOOST_DIR - curl --silent --location --output - \ - https://boostorg.jfrog.io/artifactory/main/release/$BOOST_VERSION/source/boost_${BOOST_VERSION//./_}.tar.bz2 |\ - tar jxf - -C $BOOST_DIR --strip-components=1 boost_${BOOST_VERSION//./_}/boost - shell: bash - - - name: Restore build dependencies - id: cache-build-dependencies - uses: actions/cache@v4 - with: - path: | - skdecide/hub/bin - skdecide/hub/share - skdecide/hub/*.msc - key: ${{ runner.os }}-cache-deps - - - name: Update SKDECIDE_SKIP_DEPS - if: steps.cache-build-dependencies.outputs.cache-hit != 'true' - run: echo "SKDECIDE_SKIP_DEPS=0" >> $GITHUB_ENV - - - name: Build wheel - run: | - export "Boost_ROOT=$PWD/$BOOST_DIR" - python -m pip install --upgrade pip - pip install build poetry-dynamic-versioning - python -m build --sdist --wheel - - - name: Update build cache from wheels - if: steps.cache-build-dependencies.outputs.cache-hit != 'true' - run: 7z x dist/*.whl -y - - - name: Upload as build artifacts - uses: actions/upload-artifact@v4 - with: - name: wheels-${{ matrix.os }}-${{ matrix.python-version }} - path: dist/*.whl - - build-macos: - needs: [setup] - strategy: - matrix: - arch: [ "arm64", "x86_64" ] # NB: only x86_64 wheel will be tested as no macosx_arm64 github runner available - os: ${{ fromJSON(needs.setup.outputs.build).macos }} - python-version: ${{ fromJSON(needs.setup.outputs.python_version_per_os).macos }} - fail-fast: false - defaults: - run: - shell: bash - runs-on: ${{ matrix.os }} - - steps: - - name: Checkout scikit-decide source code - uses: actions/checkout@v4 - with: - submodules: true - fetch-depth: 0 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: Load cached venv - id: cached-pip-wheels - uses: actions/cache@v4 - with: - path: ~/.cache - key: venv-${{ runner.os }}-${{ matrix.arch }}-${{ hashFiles('**/poetry.lock') }} - - - name: Restore 
Boost cache - uses: actions/cache@v4 - id: cache-boost - with: - path: ${{env.BOOST_DIR}} - key: BOOST_${{env.BOOST_VERSION}} - - - name: Install Boost - if: steps.cache-boost.outputs.cache-hit != 'true' - run: | - mkdir -p $BOOST_DIR - curl --silent --location --output - \ - https://boostorg.jfrog.io/artifactory/main/release/$BOOST_VERSION/source/boost_${BOOST_VERSION//./_}.tar.bz2 |\ - tar jxf - -C $BOOST_DIR --strip-components=1 boost_${BOOST_VERSION//./_}/boost - shell: bash - - - name: Restore build dependencies - id: cache-build-dependencies - uses: actions/cache@v4 - with: - path: | - skdecide/hub/bin - skdecide/hub/share - skdecide/hub/*.msc - key: ${{ runner.os }}-${{ matrix.arch }}-cache-deps - - - name: Update SKDECIDE_SKIP_DEPS - if: steps.cache-build-dependencies.outputs.cache-hit != 'true' - run: echo "SKDECIDE_SKIP_DEPS=0" >> $GITHUB_ENV - - - name: Build wheel - env: - ARCH: ${{ matrix.arch }} - PYTHON_VERSION: ${{ matrix.python-version }} - run: | - - if [[ "$ARCH" == arm64 ]]; then - # SciPy requires 12.0 on arm to prevent kernel panics - # https://github.com/scipy/scipy/issues/14688 - # We use the same deployment target to match SciPy. - export MACOSX_DEPLOYMENT_TARGET=12.0 - OPENMP_URL="https://anaconda.org/conda-forge/llvm-openmp/11.1.0/download/osx-arm64/llvm-openmp-11.1.0-hf3c4609_1.tar.bz2" - else - export MACOSX_DEPLOYMENT_TARGET=10.15 - OPENMP_URL="https://anaconda.org/conda-forge/llvm-openmp/11.1.0/download/osx-64/llvm-openmp-11.1.0-hda6cdc1_1.tar.bz2" - fi - PYTHON_VERSION_WO_DOT=$(echo ${PYTHON_VERSION} | sed -e 's/\.//g') # remove "." - MACOSX_DEPLOYMENT_TARGET_WO_DOT=$(echo ${MACOSX_DEPLOYMENT_TARGET} | sed -e 's/\./_/g') # replace "." 
# NOTE(review): this chunk was reconstructed from a whitespace-mangled source in
# which every newline had been collapsed into a " - " separator. Structure and
# indentation restored. The "Build wheels" step header directly below lies
# outside the visible chunk and was reconstructed — confirm its exact
# name/env/if against repository history before merging.

      # --- tail of the build-macos job (job header is above this chunk) ---
      - name: Build wheels
        run: |
          # NOTE(review): earlier script lines (ARCH / PYTHON_VERSION_WO_DOT /
          # MACOSX_DEPLOYMENT_TARGET_WO_DOT / OPENMP_URL setup) are truncated
          # in the source chunk — restore them from history.
          # install appropriate version of openmp
          sudo conda create -n build $OPENMP_URL

          # make openmp and boost available
          export Boost_ROOT=$PWD/$BOOST_DIR
          export OpenMP_ROOT=$CONDA/envs/build
          export CPPFLAGS="$CPPFLAGS -Xpreprocessor -fopenmp"
          export CFLAGS="$CFLAGS -I$OpenMP_ROOT/include"
          export CXXFLAGS="$CXXFLAGS -I$OpenMP_ROOT/include"
          export LDFLAGS="$LDFLAGS -Wl,-rpath,$OpenMP_ROOT/lib -L$OpenMP_ROOT/lib -lomp"

          # cmake flag to cross-compile the c++
          export CMAKE_OSX_ARCHITECTURES=${ARCH}

          python -m pip install cibuildwheel
          # cibuildwheel flags
          export CIBW_BUILD_FRONTEND="build"
          export CIBW_ARCHS=${ARCH}
          export CIBW_BUILD="cp${PYTHON_VERSION_WO_DOT}-macosx_${ARCH}"

          # build wheel
          python -m cibuildwheel --output-dir wheelhouse

          # set the proper platform tag
          # - with poetry build + cross-compilation for arm64, the tag could been still x64_64 (https://cibuildwheel.readthedocs.io/en/stable/faq/#how-to-cross-compile)
          # - we downgrade the displayed macosx version to ensure compatibility with lesser macosx than the ones used on this runner
          pip install "wheel>=0.40"
          wheel tags --platform-tag macosx_${MACOSX_DEPLOYMENT_TARGET_WO_DOT}_${ARCH} --remove wheelhouse/*.whl

      - name: Update build cache from wheels
        if: steps.cache-build-dependencies.outputs.cache-hit != 'true'
        run: 7z x wheelhouse/*.whl -y

      - name: Upload as build artifacts
        uses: actions/upload-artifact@v4
        with:
          name: wheels-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.arch }}
          path: wheelhouse/*.whl

  # Build the linux wheels inside a docker image (manylinux toolchain).
  build-ubuntu:
    needs: [setup]
    strategy:
      matrix:
        os: ${{ fromJSON(needs.setup.outputs.build).ubuntu }}
        # FIX(review): the setup job now exposes python_version_build_per_os /
        # python_version_test_per_os (python_version_per_os no longer exists);
        # build jobs must use the *_build_* variant or the matrix expands empty.
        python-version: ${{ fromJSON(needs.setup.outputs.python_version_build_per_os).ubuntu }}
      fail-fast: false
    defaults:
      run:
        shell: bash
    runs-on: ${{ matrix.os }}

    steps:
      - name: Checkout scikit-decide source code
        uses: actions/checkout@v4
        with:
          submodules: true
          fetch-depth: 0

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Load cached venv
        id: cached-pip-wheels
        uses: actions/cache@v4
        with:
          path: ~/.cache
          key: venv-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }}

      - name: Restore Boost cache
        uses: actions/cache@v4
        id: cache-boost
        with:
          path: ${{env.BOOST_DIR}}
          key: BOOST_${{env.BOOST_VERSION}}

      - name: Install Boost
        if: steps.cache-boost.outputs.cache-hit != 'true'
        run: |
          mkdir -p $BOOST_DIR
          curl --silent --location --output - \
            https://boostorg.jfrog.io/artifactory/main/release/$BOOST_VERSION/source/boost_${BOOST_VERSION//./_}.tar.bz2 |\
            tar jxf - -C $BOOST_DIR --strip-components=1 boost_${BOOST_VERSION//./_}/boost
        shell: bash

      - name: Restore build dependencies
        id: cache-build-dependencies
        uses: actions/cache@v4
        with:
          path: |
            skdecide/hub/bin
            skdecide/hub/share
            skdecide/hub/*.msc
          key: ${{ runner.os }}-cache-deps

      - name: Update SKDECIDE_SKIP_DEPS
        if: steps.cache-build-dependencies.outputs.cache-hit != 'true'
        run: echo "SKDECIDE_SKIP_DEPS=0" >> $GITHUB_ENV

      - name: Restore docker dev image
        id: cache-dev-deps
        uses: actions/cache@v4
        with:
          path: /tmp/docker
          key: dev-deps-${{ runner.os }}-${{ hashFiles('scripts/build-skdecide_dev.sh', 'scripts/Dockerfile_x86_64_dev') }}

      - name: Build wheels
        run: |
          # Load skdecide_dev image from cache, or build it if not found
          if test -f /tmp/docker/skdecide_dev.tar; then
            docker image load -i /tmp/docker/skdecide_dev.tar
          else
            docker build -f scripts/Dockerfile_x86_64_dev -t skdecide_dev .
            mkdir -p /tmp/docker
            docker image save -o /tmp/docker/skdecide_dev.tar skdecide_dev
          fi
          docker build -f scripts/Dockerfile_x86_64 -t skdecide_x86_64 --build-arg PYTHON_VERSION=${{matrix.python-version}} --build-arg SKDECIDE_SKIP_DEPS=${SKDECIDE_SKIP_DEPS} --build-arg BOOST_DIR=${BOOST_DIR} .
          # Fetch wheels from Docker
          docker run --rm -v $PWD:/mytmp skdecide_x86_64 cp -r /io/dist /mytmp

      - name: Update build cache from wheels
        if: steps.cache-build-dependencies.outputs.cache-hit != 'true'
        run: 7z x dist/*.whl -y

      - name: Upload as build artifacts
        uses: actions/upload-artifact@v4
        with:
          name: wheels-${{ matrix.os }}-${{ matrix.python-version }}
          path: dist/*.whl

  # Install the built windows wheels plus MiniZinc and run the test suite.
  test-windows:
    needs: [build-macos, build-ubuntu, build-windows, setup]
    strategy:
      matrix:
        os: ${{ fromJSON(needs.setup.outputs.test).windows }}
        # FIX(review): test jobs use the *_test_* output of setup (the former
        # python_version_per_os output was split into build/test variants).
        python-version: ${{ fromJSON(needs.setup.outputs.python_version_test_per_os).windows }}
        compiler: [gnu]
      fail-fast: true
    runs-on: ${{ matrix.os }}
    defaults:
      run:
        shell: bash
    env:
      minizinc_config_cmdline: export PATH=$PATH:~/AppData/Local/Programs/MiniZinc
      minizinc_cache_path: ~/AppData/Local/Programs/MiniZinc
      minizinc_url: https://github.com/MiniZinc/MiniZincIDE/releases/download/2.6.3/MiniZincIDE-2.6.3-bundled-setup-win64.exe
      minizinc_downloaded_filepath: minizinc_setup.exe
      minizinc_install_cmdline: cmd //c "minizinc_setup.exe /verysilent /currentuser /norestart /suppressmsgboxes /sp"

    steps:
      - uses: actions/checkout@v4
        with:
          submodules: true

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: wheels-windows*-${{ matrix.python-version }}
          merge-multiple: true
          path: wheels

      - name: get MiniZinc path to cache
        id: get-mzn-cache-path
        run: |
          echo "path=${{ env.minizinc_cache_path }}" >> $GITHUB_OUTPUT  # expands variables

      - name: Restore MiniZinc cache
        id: cache-minizinc
        uses: actions/cache@v4
        with:
          path: ${{ steps.get-mzn-cache-path.outputs.path }}
          key: ${{ env.minizinc_url }}

      - name: Download MiniZinc
        if: steps.cache-minizinc.outputs.cache-hit != 'true'
        run: |
          curl -o "${{ env.minizinc_downloaded_filepath }}" -L ${{ env.minizinc_url }}

      - name: Install MiniZinc
        if: steps.cache-minizinc.outputs.cache-hit != 'true'
        run: |
          ${{ env.minizinc_install_cmdline }}

      - name: Test minizinc install
        run: |
          ${{ env.minizinc_config_cmdline }}
          minizinc --version

      - name: Install scikit-decide and test dependencies
        run: |
          python_version=${{ matrix.python-version }}
          wheelfile=$(ls ./wheels/scikit_decide*-cp${python_version/\./}-*win*.whl)
          pip install ${wheelfile}[all] pytest gymnasium[classic-control]

      - name: Test with pytest
        run: |
          # configure minizinc
          ${{ env.minizinc_config_cmdline }}
          # test minizinc
          python -c "import minizinc; print(minizinc.default_driver.minizinc_version); minizinc.Solver.lookup('gecode')"
          # run pytest
          # we split tests using
          # - c++ scikit-decide library
          # - ortools (scheduling)
          # - deep-learning solvers (solvers/python)
          # - from others
          # to avoid openmp versions conflicts
          pytest -v -s tests/*/cpp
          pytest -v -s tests/solvers/python
          pytest -v -s tests/scheduling
          pytest -v -s --ignore-glob tests/*/cpp --ignore tests/solvers/python --ignore tests/scheduling

  # Install the built macos wheels plus MiniZinc and run the test suite.
  test-macos:
    needs: [build-macos, build-ubuntu, build-windows, setup]
    strategy:
      matrix:
        os: ${{ fromJSON(needs.setup.outputs.test).macos }}
        # FIX(review): see test-windows — use the *_test_* output of setup.
        python-version: ${{ fromJSON(needs.setup.outputs.python_version_test_per_os).macos }}
      fail-fast: true
    runs-on: ${{ matrix.os }}
    env:
      minizinc_config_cmdline: export PATH=$PATH:$(pwd)/bin/MiniZincIDE.app/Contents/Resources
      minizinc_cache_path: $(pwd)/bin/MiniZincIDE.app
      minizinc_url: https://github.com/MiniZinc/MiniZincIDE/releases/download/2.6.3/MiniZincIDE-2.6.3-bundled.dmg
      minizinc_downloaded_filepath: bin/minizinc.dmg
      minizinc_install_cmdline: sudo hdiutil attach bin/minizinc.dmg; sudo cp -R /Volumes/MiniZinc*/MiniZincIDE.app bin/.

    steps:
      - uses: actions/checkout@v4

      - name: Install libomp package
        run: brew install libomp

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: wheels-macos*-${{ matrix.python-version }}*
          merge-multiple: true
          path: wheels

      - name: Create bin/
        run: mkdir -p bin

      - name: get MiniZinc path to cache
        id: get-mzn-cache-path
        run: |
          echo "path=${{ env.minizinc_cache_path }}" >> $GITHUB_OUTPUT  # expands variables

      - name: Restore MiniZinc cache
        id: cache-minizinc
        uses: actions/cache@v4
        with:
          path: ${{ steps.get-mzn-cache-path.outputs.path }}
          key: ${{ env.minizinc_url }}

      - name: Download MiniZinc
        if: steps.cache-minizinc.outputs.cache-hit != 'true'
        run: |
          curl -o "${{ env.minizinc_downloaded_filepath }}" -L ${{ env.minizinc_url }}

      - name: Install MiniZinc
        if: steps.cache-minizinc.outputs.cache-hit != 'true'
        run: |
          ${{ env.minizinc_install_cmdline }}

      - name: Test minizinc install
        run: |
          ${{ env.minizinc_config_cmdline }}
          minizinc --version

      - name: Install scikit-decide and test dependencies
        run: |
          python_version=${{ matrix.python-version }}
          wheelfile=$(ls ./wheels/scikit_decide*-cp${python_version/\./}-*macos*x86_64.whl)
          pip install ${wheelfile}[all] pytest gymnasium[classic-control]

      - name: Test with pytest
        run: |
          # configure minizinc
          ${{ env.minizinc_config_cmdline }}
          # test minizinc
          python -c "import minizinc; print(minizinc.default_driver.minizinc_version); minizinc.Solver.lookup('gecode')"
          # run pytest
          # we split tests using
          # - c++ scikit-decide library
          # - ortools (scheduling)
          # - deep-learning solvers (solvers/python)
          # - from others
          # to avoid openmp versions conflicts
          pytest -v -s tests/*/cpp
          pytest -v -s tests/solvers/python
          pytest -v -s tests/scheduling
          pytest -v -s --ignore-glob tests/*/cpp --ignore tests/solvers/python --ignore tests/scheduling

  # Install the built manylinux wheels plus MiniZinc and run the test suite.
  test-ubuntu:
    needs: [build-macos, build-ubuntu, build-windows, setup]
    strategy:
      matrix:
        os: ${{ fromJSON(needs.setup.outputs.test).ubuntu }}
        # FIX(review): see test-windows — use the *_test_* output of setup.
        python-version: ${{ fromJSON(needs.setup.outputs.python_version_test_per_os).ubuntu }}
      fail-fast: true
    runs-on: ${{ matrix.os }}
    env:
      minizinc_config_cmdline: export PATH=$PATH:$(pwd)/bin/squashfs-root/usr/bin; export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pwd)/bin/squashfs-root/usr/lib
      minizinc_cache_path: $(pwd)/bin/squashfs-root
      minizinc_url: https://github.com/MiniZinc/MiniZincIDE/releases/download/2.6.3/MiniZincIDE-2.6.3-x86_64.AppImage
      minizinc_downloaded_filepath: bin/minizinc.AppImage
      minizinc_install_cmdline: cd bin; sudo chmod +x minizinc.AppImage; sudo ./minizinc.AppImage --appimage-extract; cd ..

    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: wheels-ubuntu*-${{ matrix.python-version }}
          merge-multiple: true
          path: wheels

      - name: Create bin/
        run: mkdir -p bin

      - name: get MiniZinc path to cache
        id: get-mzn-cache-path
        run: |
          echo "path=${{ env.minizinc_cache_path }}" >> $GITHUB_OUTPUT  # expands variables

      - name: Restore MiniZinc cache
        id: cache-minizinc
        uses: actions/cache@v4
        with:
          path: ${{ steps.get-mzn-cache-path.outputs.path }}
          key: ${{ env.minizinc_url }}

      - name: Download MiniZinc
        if: steps.cache-minizinc.outputs.cache-hit != 'true'
        run: |
          curl -o "${{ env.minizinc_downloaded_filepath }}" -L ${{ env.minizinc_url }}

      - name: Install MiniZinc
        if: steps.cache-minizinc.outputs.cache-hit != 'true'
        run: |
          ${{ env.minizinc_install_cmdline }}

      - name: Test minizinc install
        run: |
          ${{ env.minizinc_config_cmdline }}
          minizinc --version

      - name: Install scikit-decide and test dependencies
        run: |
          python_version=${{ matrix.python-version }}
          wheelfile=$(ls ./wheels/scikit_decide*-cp${python_version/\./}-*manylinux*.whl)
          pip install ${wheelfile}[all] pytest gymnasium[classic-control]

      - name: Test with pytest
        run: |
          # configure minizinc
          ${{ env.minizinc_config_cmdline }}
          # test minizinc
          python -c "import minizinc; print(minizinc.default_driver.minizinc_version); minizinc.Solver.lookup('gecode')"
          # run pytest
          # we split tests using
          # - c++ scikit-decide library
          # - ortools (scheduling)
          # - deep-learning solvers (solvers/python)
          # - from others
          # to avoid openmp versions conflicts
          pytest -v -s tests/*/cpp
          pytest -v -s tests/solvers/python
          pytest -v -s tests/scheduling
          pytest -v -s --ignore-glob tests/*/cpp --ignore tests/solvers/python --ignore tests/scheduling

  # Attach all tested wheels to the GitHub release of the triggering tag.
  # NOTE(review): this job is not gated on trigger.outputs.is_release — on
  # non-release runs tag-name is empty and the release action would fail;
  # confirm an `if:` guard is not expected here.
  upload:
    needs: [get-release-version, test-ubuntu, test-macos, test-windows]
    runs-on: ubuntu-latest

    steps:
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: wheels-*
          merge-multiple: true
          path: wheels

      - name: Upload artifacts 📦 to release
        uses: ncipollo/release-action@v1
        with:
          artifacts: wheels/*.whl
          tag: ${{ needs.get-release-version.outputs.tag-name }}
          allowUpdates: true
          generateReleaseNotes: true

  # Publish the wheels to PyPI (main repo) or TestPyPI (forks, when enabled).
  deploy:
    needs: [upload, setup]
    runs-on: ubuntu-latest

    steps:
      - name: Download artifact
        uses: actions/download-artifact@v4
        with:
          pattern: wheels-*
          merge-multiple: true
          path: wheels

      - name: Publish distribution 📦 to PyPI
        env:
          PYPI_TOKEN: ${{ secrets.PYPI_PASSWORD }}
        # only from the main repo, and only if the secret is configured
        if: github.repository == env.MAIN_REPO_NAME && env.PYPI_TOKEN != ''
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          password: ${{ secrets.PYPI_PASSWORD }}
          packages_dir: wheels/

      - name: Publish distribution 📦 to Test PyPI
        env:
          TEST_PYPI_TOKEN: ${{ secrets.TEST_PYPI_PASSWORD }}
        if: env.TEST_PYPI_TOKEN != '' && needs.setup.outputs.deploy_test_pypi == 'true'
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          password: ${{ secrets.TEST_PYPI_PASSWORD }}
          packages_dir: wheels/
          repository_url: https://test.pypi.org/legacy/

  # Pin the released scikit-decide version inside the notebooks and the binder
  # environment, then push the result on a dedicated "notebooks-<tag>" tag.
  update-notebooks-for-colab-and-binder:
    runs-on: ubuntu-latest
    needs: [get-release-version, build-ubuntu]
    outputs:
      notebooks-branch: ${{ steps.write-output.outputs.notebooks_branch }}
      binder-full-ref: ${{ steps.write-output.outputs.binder_full_ref }}
    steps:
      - uses: actions/checkout@v4
      - name: replace scikit-decide version to install in colab notebooks
        run: |
          version=${{ needs.get-release-version.outputs.skdecide-version }}
          old_pip_spec_pattern="\(skdecide_pip_spec.*\)scikit-decide\[all\]"
          new_pip_spec_pattern="\1scikit-decide[all]==${version}"
          # the ${{ }} expression renders to the shell command `true` or `false`,
          # whose exit status selects the branch
          if ${{ github.repository != env.MAIN_REPO_NAME && secrets.TEST_PYPI_PASSWORD != '' }} == 'true'; then
            # install from TestPypi if on a fork
            new_pip_spec_pattern="${new_pip_spec_pattern} --extra-index-url https://test.pypi.org/simple/"
          fi
          old_using_nightly_pattern="\(using_nightly_version\s*=\s*\)True"
          new_using_nightly_pattern="using_nightly_version = False"
          shopt -s globstar  # enable **
          sed -i \
            -e "s|${old_pip_spec_pattern}|${new_pip_spec_pattern}|g" \
            -e "s|${old_using_nightly_pattern}|${new_using_nightly_pattern}|g" \
            notebooks/**/*.ipynb
      - name: replace scikit-decide version to install in binder environment
        run: |
          version=${{ needs.get-release-version.outputs.skdecide-version }}
          # environment.yml
          linefilter="/^name/!"
          old_pip_spec_pattern="\(\s*\)-.*scikit-decide.*$"
          new_pip_spec_pattern="\1- scikit-decide[all]==$version"
          # FIX(review): was secrets.TEST_PYPI_API_TOKEN — aligned with the
          # TEST_PYPI_PASSWORD secret used by the colab step and the deploy job
          if ${{ github.repository != env.MAIN_REPO_NAME && secrets.TEST_PYPI_PASSWORD != '' }} == 'true'; then
            # install from TestPypi if on a fork
            new_pip_spec_pattern="${new_pip_spec_pattern}\n\1- --extra-index-url https://test.pypi.org/simple/"
          fi
          sed_command="${linefilter}s|${old_pip_spec_pattern}|${new_pip_spec_pattern}|"
          echo sed -i -e ${sed_command} binder/environment.yml
          sed -i -e "${sed_command}" binder/environment.yml
          # postBuild
          old_using_nightly_pattern="using_nightly_version=true"
          new_using_nightly_pattern="using_nightly_version=false"
          sed_command="s|${old_using_nightly_pattern}|${new_using_nightly_pattern}|"
          sed -i -e "${sed_command}" binder/postBuild
      - name: push modifications on a dedicated tag
        id: push-tuto-release-tag
        run: |
          current_tag_name=${{ needs.get-release-version.outputs.tag-name }}
          new_tag_name="notebooks-${current_tag_name}"
          echo ${new_tag_name}
          git config user.name "Actions"
          git config user.email "actions@github.com"
          git commit binder notebooks -m "Install appropriate version of scikit-decide"
          git tag ${new_tag_name} -m "Use release ${current_tag_name} in binder and colab"
          git push origin ${new_tag_name}
          # store new tag name as notebooks branch
          echo "notebooks_branch=${new_tag_name}" >> $GITHUB_ENV
          echo "binder_full_ref=${{ github.repository }}/${new_tag_name}" >> $GITHUB_ENV
      - name: write new notebooks branch in job outputs
        id: write-output
        run: |
          # env vars exported via $GITHUB_ENV in the previous step
          echo "notebooks_branch=${notebooks_branch}" >> $GITHUB_OUTPUT
          echo "binder_full_ref=${binder_full_ref}" >> $GITHUB_OUTPUT

  # Build the documentation against the notebooks tag created above.
  build-doc:
    needs: [build-ubuntu, setup, update-notebooks-for-colab-and-binder]
    uses: ./.github/workflows/build-doc.yml
    with:
      notebooks-branch: ${{ needs.update-notebooks-for-colab-and-binder.outputs.notebooks-branch }}
      doc-prerequisites-cmdline: export DO_SKIP_MZN_CHECK=1

  # Deploy the versioned documentation once everything is tested.
  deploy-doc:
    needs: [build-doc, test-windows, test-macos, test-ubuntu, update-notebooks-for-colab-and-binder, get-release-version]
    uses: ./.github/workflows/deploy-doc.yml
    with:
      binder-env-fullref: ${{ needs.update-notebooks-for-colab-and-binder.outputs.binder-full-ref }}
      doc-clean: "false"
      doc-version-path: /version/${{ needs.get-release-version.outputs.skdecide-version }}/

  # Drop the rolling "nightly" pre-release after a real release is published.
  delete-nightly-release:
    runs-on: ubuntu-latest
    needs: [deploy]

    steps:
      - name: Delete nightly release
        uses: actions/github-script@v7
        with:
          script: |
            const releases = await github.rest.repos.listReleases({
              owner: context.repo.owner,
              repo: context.repo.repo,
            })

            const nightlyRelease = releases.data.find(r => r.tag_name === 'nightly')

            if (nightlyRelease) {
              await github.rest.repos.deleteRelease({
                owner: context.repo.owner,
                repo: context.repo.repo,
                release_id: nightlyRelease.id,
              })
              console.log(`${nightlyRelease.tag_name} release has been deleted`)

            } else {
              console.log('No nightly release found')
            }