diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs
index cbdb174c02d97..39d7ea922bced 100644
--- a/src/bootstrap/lib.rs
+++ b/src/bootstrap/lib.rs
@@ -1080,6 +1080,10 @@ impl Build {
     /// done. The file is updated immediately after this function completes.
     pub fn save_toolstate(&self, tool: &str, state: ToolState) {
         if let Some(ref path) = self.config.save_toolstates {
+            if let Some(parent) = path.parent() {
+                // Ensure the parent directory always exists
+                t!(std::fs::create_dir_all(parent));
+            }
             let mut file = t!(fs::OpenOptions::new()
                 .create(true)
                 .read(true)
diff --git a/src/ci/azure-pipelines/auto.yml b/src/ci/azure-pipelines/auto.yml
index 24b07a1b7c950..271c32585449e 100644
--- a/src/ci/azure-pipelines/auto.yml
+++ b/src/ci/azure-pipelines/auto.yml
@@ -140,6 +140,7 @@ jobs:
         IMAGE: x86_64-gnu-aux
       x86_64-gnu-tools:
         IMAGE: x86_64-gnu-tools
+        DEPLOY_TOOLSTATES_JSON: toolstates-linux.json
       x86_64-gnu-debug:
         IMAGE: x86_64-gnu-debug
       x86_64-gnu-nopt:
@@ -262,8 +263,9 @@ jobs:
       # MSVC tools tests
       x86_64-msvc-tools:
         MSYS_BITS: 64
-        SCRIPT: src/ci/docker/x86_64-gnu-tools/checktools.sh x.py /tmp/toolstates.json windows
-        RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --save-toolstates=/tmp/toolstates.json
+        SCRIPT: src/ci/docker/x86_64-gnu-tools/checktools.sh x.py /tmp/toolstate/toolstates.json windows
+        RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --save-toolstates=/tmp/toolstate/toolstates.json
+        DEPLOY_TOOLSTATES_JSON: toolstates-windows.json
 
       # 32/64-bit MinGW builds.
       #
diff --git a/src/ci/azure-pipelines/steps/run.yml b/src/ci/azure-pipelines/steps/run.yml
index 01858cca50b1c..c82bc6af738a0 100644
--- a/src/ci/azure-pipelines/steps/run.yml
+++ b/src/ci/azure-pipelines/steps/run.yml
@@ -201,37 +201,21 @@ steps:
   condition: and(succeeded(), not(variables.SKIP_JOB))
   displayName: Run build
 
-# If we're a deploy builder, use the `aws` command to publish everything to our
-# bucket.
-- bash: |
-    set -e
-    source src/ci/shared.sh
-    if [ "$AGENT_OS" = "Linux" ]; then
-      rm -rf obj/build/dist/doc
-      upload_dir=obj/build/dist
-    else
-      rm -rf build/dist/doc
-      upload_dir=build/dist
-    fi
-    ls -la $upload_dir
-    deploy_dir=rustc-builds
-    if [ "$DEPLOY_ALT" == "1" ]; then
-      deploy_dir=rustc-builds-alt
-    fi
-    retry aws s3 cp --no-progress --recursive --acl public-read ./$upload_dir s3://$DEPLOY_BUCKET/$deploy_dir/$BUILD_SOURCEVERSION
+- bash: src/ci/scripts/upload-artifacts.sh
   env:
     AWS_ACCESS_KEY_ID: $(UPLOAD_AWS_ACCESS_KEY_ID)
     AWS_SECRET_ACCESS_KEY: $(UPLOAD_AWS_SECRET_ACCESS_KEY)
-  condition: and(succeeded(), not(variables.SKIP_JOB), or(eq(variables.DEPLOY, '1'), eq(variables.DEPLOY_ALT, '1')))
   displayName: Upload artifacts
-
-# Upload CPU usage statistics that we've been gathering this whole time. Always
-# execute this step in case we want to inspect failed builds, but don't let
-# errors here ever fail the build since this is just informational.
-- bash: aws s3 cp --acl public-read cpu-usage.csv s3://$DEPLOY_BUCKET/rustc-builds/$BUILD_SOURCEVERSION/cpu-$CI_JOB_NAME.csv
-  env:
-    AWS_ACCESS_KEY_ID: $(UPLOAD_AWS_ACCESS_KEY_ID)
-    AWS_SECRET_ACCESS_KEY: $(UPLOAD_AWS_SECRET_ACCESS_KEY)
-  condition: variables['UPLOAD_AWS_SECRET_ACCESS_KEY']
-  continueOnError: true
-  displayName: Upload CPU usage statistics
+  # Adding a condition on DEPLOY=1 or DEPLOY_ALT=1 is not strictly needed, as
+  # all deploy builders *should* have the AWS credentials available. Still,
+  # adding the condition explicitly is helpful: this way CI will not silently
+  # skip deploying artifacts from a dist builder when the variables are
+  # misconfigured, and will error about invalid credentials instead.
+  condition: |
+    and(
+      succeeded(), not(variables.SKIP_JOB),
+      or(
+        variables.UPLOAD_AWS_SECRET_ACCESS_KEY,
+        eq(variables.DEPLOY, '1'), eq(variables.DEPLOY_ALT, '1')
+      )
+    )
diff --git a/src/ci/docker/run.sh b/src/ci/docker/run.sh
index 415d6b63eb8dc..cdafcbadc9ec7 100755
--- a/src/ci/docker/run.sh
+++ b/src/ci/docker/run.sh
@@ -106,6 +106,7 @@ fi
 mkdir -p $HOME/.cargo
 mkdir -p $objdir/tmp
 mkdir -p $objdir/cores
+mkdir -p /tmp/toolstate
 
 args=
 if [ "$SCCACHE_BUCKET" != "" ]; then
@@ -156,6 +157,7 @@ else
   args="$args --volume $objdir:/checkout/obj"
   args="$args --volume $HOME/.cargo:/cargo"
   args="$args --volume $HOME/rustsrc:$HOME/rustsrc"
+  args="$args --volume /tmp/toolstate:/tmp/toolstate"
   args="$args --env LOCAL_USER_ID=`id -u`"
 fi
 
diff --git a/src/ci/docker/x86_64-gnu-tools/Dockerfile b/src/ci/docker/x86_64-gnu-tools/Dockerfile
index 8035195c6ed0a..7687a6ca23e18 100644
--- a/src/ci/docker/x86_64-gnu-tools/Dockerfile
+++ b/src/ci/docker/x86_64-gnu-tools/Dockerfile
@@ -26,5 +26,5 @@ ENV CHECK_LINKS 1
 
 ENV RUST_CONFIGURE_ARGS \
   --build=x86_64-unknown-linux-gnu \
-  --save-toolstates=/tmp/toolstates.json
-ENV SCRIPT /tmp/checktools.sh ../x.py /tmp/toolstates.json linux
+  --save-toolstates=/tmp/toolstate/toolstates.json
+ENV SCRIPT /tmp/checktools.sh ../x.py /tmp/toolstate/toolstates.json linux
diff --git a/src/ci/docker/x86_64-gnu-tools/checktools.sh b/src/ci/docker/x86_64-gnu-tools/checktools.sh
index 4243effdf9b4b..ebb8c0bda53ee 100755
--- a/src/ci/docker/x86_64-gnu-tools/checktools.sh
+++ b/src/ci/docker/x86_64-gnu-tools/checktools.sh
@@ -3,7 +3,7 @@
 set -eu
 
 X_PY="$1"
-TOOLSTATE_FILE="$(realpath $2)"
+TOOLSTATE_FILE="$(realpath -m $2)"
 OS="$3"
 COMMIT="$(git rev-parse HEAD)"
 CHANGED_FILES="$(git diff --name-status HEAD HEAD^)"
@@ -13,6 +13,7 @@ SIX_WEEK_CYCLE="$(( ($(date +%s) / 86400 - 20) % 42 ))"
 # The Wednesday after this has value 0.
 # We track this value to prevent regressing tools in the last week of the 6-week cycle.
 
+mkdir -p "$(dirname $TOOLSTATE_FILE)"
 touch "$TOOLSTATE_FILE"
 
 # Try to test all the tools and store the build/test success in the TOOLSTATE_FILE
diff --git a/src/ci/scripts/upload-artifacts.sh b/src/ci/scripts/upload-artifacts.sh
new file mode 100755
index 0000000000000..312ec9d805012
--- /dev/null
+++ b/src/ci/scripts/upload-artifacts.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
+# Upload all the artifacts to our S3 bucket. All the files inside ${upload_dir}
+# will be uploaded to the deploy bucket and eventually signed and released on
+# static.rust-lang.org.
+
+set -euo pipefail
+IFS=$'\n\t'
+
+source "$(cd "$(dirname "$0")" && pwd)/../shared.sh"
+
+upload_dir="$(mktemp -d)"
+
+# Release tarballs produced by a dist builder.
+if [[ "${DEPLOY-0}" -eq "1" ]] || [[ "${DEPLOY_ALT-0}" -eq "1" ]]; then
+    dist_dir=build/dist
+    if isLinux; then
+        dist_dir=obj/build/dist
+    fi
+    rm -rf "${dist_dir}/doc"
+    cp -r "${dist_dir}"/* "${upload_dir}"
+fi
+
+# CPU usage statistics.
+cp cpu-usage.csv "${upload_dir}/cpu-${CI_JOB_NAME}.csv"
+
+# Toolstate data.
+if [[ -n "${DEPLOY_TOOLSTATES_JSON+x}" ]]; then
+    cp /tmp/toolstate/toolstates.json "${upload_dir}/${DEPLOY_TOOLSTATES_JSON}"
+fi
+
+echo "Files that will be uploaded:"
+ls -lah "${upload_dir}"
+echo
+
+deploy_dir="rustc-builds"
+if [[ "${DEPLOY_ALT-0}" -eq "1" ]]; then
+    deploy_dir="rustc-builds-alt"
+fi
+deploy_url="s3://${DEPLOY_BUCKET}/${deploy_dir}/$(ciCommit)"
+
+retry aws s3 cp --no-progress --recursive --acl public-read "${upload_dir}" "${deploy_url}"
diff --git a/src/ci/shared.sh b/src/ci/shared.sh
index 37e45b5639dc9..718a5379ae558 100644
--- a/src/ci/shared.sh
+++ b/src/ci/shared.sh
@@ -46,6 +46,10 @@ function getCIBranch {
   echo "$BUILD_SOURCEBRANCHNAME"
 }
 
+function ciCommit {
+  echo "${BUILD_SOURCEVERSION}"
+}
+
 function ciCommandAddPath {
   if [[ $# -ne 1 ]]; then
     echo "usage: $0 <path>"
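
Why the src/bootstrap/lib.rs hunk is needed: `fs::OpenOptions` with `create(true)` creates the missing file but not its missing parent directories, so saving the toolstate JSON under the new /tmp/toolstate/ location would fail until that directory exists. The following is a minimal standalone Rust sketch of the same pattern; the hard-coded path is purely illustrative and this is not the actual bootstrap code.

use std::fs::{self, OpenOptions};
use std::io::Write;
use std::path::Path;

fn main() -> std::io::Result<()> {
    // Illustrative stand-in for the value passed via `--save-toolstates`.
    let path = Path::new("/tmp/toolstate/toolstates.json");

    // `create(true)` only creates the file itself, so make sure the parent
    // directory exists first (this mirrors the lib.rs hunk above).
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)?;
    }

    let mut file = OpenOptions::new().create(true).write(true).open(path)?;
    writeln!(file, "{{}}")?;
    Ok(())
}

The shell side of the patch handles the same concern with `mkdir -p` in run.sh and checktools.sh, and with `realpath -m`, which, unlike plain `realpath`, does not require the path to exist yet.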