Extra Acceptance Tests (feature-enabled networks) #891
# Exactly the same setup and teardown steps as run.yml, just different test tags
name: Extra Acceptance Tests (feature-enabled networks)
on:
  schedule:
    - cron: '0 12 * * *'
  pull_request:
    paths-ignore:
      - '**.md'
      - '.gitignore'
    branches:
      - '**'
  push:
    paths-ignore:
      - '**.md'
      - '.gitignore'
    branches:
      - 'master'
      - 'dev-*'
jobs:
  condition:
    name: Evaluate workflow run conditions
    runs-on: ubuntu-20.04
    outputs:
      should_run: '${{ steps.check.outputs.val }}'
      use_aws: '${{ steps.check.outputs.useAws }}'
      infra: '${{ steps.check.outputs.infra }}'
    steps:
      - name: Check
        id: check
        run: |
          val="false"
          if [ "${{ github.event_name }}" != "schedule" ] || [ "${{ secrets.SCHEDULE_RUN }}" != "disable" ]; then
            val="true"
          fi
if [ "$val" == "false" ]; then | |
echo "::warning ::Runs are conditionally skipped" | |
fi | |
useAws="true" | |
infraName="AWS" | |
if [ "${{ secrets.AWS_ACCESS_KEY_ID }}" == "" ] || [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ secrets.DISABLE_AWS }}" == "true" ]; then | |
useAws="false" | |
infraName="GithubActionsVM" | |
fi | |
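          # e.g. a pull_request run always provisions on GithubActionsVM, even when AWS credentials are configured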
echo "::warning ::Networks are provisioned using $infraName infrastructure" | |
echo "::set-output name=val::$val" | |
echo "::set-output name=useAws::$useAws" | |
echo "::set-output name=infra::$infraName" | |
  docker-build:
    name: 'Build Docker image'
    if: needs.condition.outputs.should_run == 'true'
    needs:
      - condition
    runs-on: ubuntu-20.04
    outputs:
      output_dir: '${{ steps.prepare.outputs.output_dir }}'
      output_file: '${{ steps.prepare.outputs.output_file }}'
      image_file: '${{ steps.prepare.outputs.image_file }}'
      image_name: '${{ steps.prepare.outputs.image_name }}'
    steps:
      - name: 'Prepare'
        id: prepare
        run: |
          output_dir=${{ runner.temp }}/docker
          mkdir -p $output_dir
          echo "::set-output name=output_dir::$output_dir"
          echo "::set-output name=output_file::$output_dir/acctests.tar.gz"
          echo "::set-output name=image_file::acctests.tar"
          echo "::set-output name=image_name::quorumengineering/acctests:gh"
      - name: 'Cache docker image'
        id: cache-image
        uses: actions/cache@v2
        with:
          path: ${{ steps.prepare.outputs.output_dir }}
          key: ${{ github.sha }}-extra
      - name: 'Check out project files'
        if: steps.cache-image.outputs.cache-hit != 'true'
        uses: actions/checkout@v2
      - name: 'Build docker image'
        if: steps.cache-image.outputs.cache-hit != 'true'
        id: build
        run: |
          docker build -t ${{ steps.prepare.outputs.image_name }} .
          docker save ${{ steps.prepare.outputs.image_name }} > ${{ steps.prepare.outputs.image_file }}
          tar cfvz ${{ steps.prepare.outputs.output_file }} ${{ steps.prepare.outputs.image_file }}
  run:
    # This workflow uses the tag expression and its sha256 hash to aggregate test results
    # from each execution. It is important that the job name includes the tag expression
    # as a suffix encapsulated within parentheses
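    # e.g. an illustrative job named 'Test (basic || basic-raft) (privacy-enhancements=false, ...)' is matched
    # back to the report file sha256('basic || basic-raft').json by the notify job below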
    name: Test (${{ matrix.tag }}) (privacy-enhancements=${{ matrix.privacy-enhancements }}, privacy-precompile=${{ matrix.privacy-precompile }}, privacy-marker-transactions=${{ matrix.privacy-marker-transactions }}, enable-gas-price=${{ matrix.enable-gas-price }})
    if: needs.condition.outputs.should_run == 'true'
    needs:
      - condition
      - docker-build
    strategy:
      fail-fast: false
      matrix:
        # list of tag expressions executed in parallel
        include:
          # privacy enhancements tests
          - tag: '(basic && !privacy-enhancements-disabled) || privacy-enhancements || mandatory-recipients || basic-raft || (advanced && raft) || networks/typical::raft'
            privacy-enhancements: true
            privacy-precompile: false
            privacy-marker-transactions: false
            enable-gas-price: false
          - tag: '(basic && !privacy-enhancements-disabled) || privacy-enhancements || mandatory-recipients || basic-istanbul || (advanced && istanbul) || networks/typical::istanbul'
            privacy-enhancements: true
            privacy-precompile: false
            privacy-marker-transactions: false
            enable-gas-price: false
          # privacy precompile/privacy marker transaction tests
          - tag: 'basic || basic-raft || (advanced && raft) || networks/typical::raft'
            privacy-enhancements: false
            privacy-precompile: true
            privacy-marker-transactions: false
            enable-gas-price: false
          - tag: 'basic || basic-istanbul || (advanced && istanbul) || networks/typical::istanbul'
            privacy-enhancements: false
            privacy-precompile: true
            privacy-marker-transactions: false
            enable-gas-price: false
          - tag: 'basic || basic-istanbul || (advanced && istanbul) || networks/typical::qbft'
            privacy-enhancements: false
            privacy-precompile: true
            privacy-marker-transactions: false
            enable-gas-price: false
          - tag: '(multitenancy || privacy-precompile-enabled) && networks/plugins::raft-multitenancy'
            privacy-enhancements: false
            privacy-precompile: true
            privacy-marker-transactions: true
            enable-gas-price: false
          - tag: '(basic && !privacy-precompile-disabled) || basic-raft || (advanced && raft) || networks/typical::raft-simple-mps'
            privacy-enhancements: false
            privacy-precompile: true
            privacy-marker-transactions: true
            enable-gas-price: false
          - tag: '(basic && !privacy-precompile-disabled) || basic-istanbul || (advanced && istanbul) || networks/typical::istanbul-simple-mps'
            privacy-enhancements: false
            privacy-precompile: true
            privacy-marker-transactions: true
            enable-gas-price: false
          - tag: '(basic && !privacy-precompile-disabled) || basic-istanbul || (advanced && istanbul) || networks/typical::qbft-simple-mps'
            privacy-enhancements: false
            privacy-precompile: true
            privacy-marker-transactions: true
            enable-gas-price: false
          - tag: '(basic && !privacy-precompile-disabled) || networks/typical::raftmps'
            privacy-enhancements: false
            privacy-precompile: true
            privacy-marker-transactions: true
            enable-gas-price: false
          - tag: '(basic && !privacy-precompile-disabled) || networks/typical::istanbulmps'
            privacy-enhancements: false
            privacy-precompile: true
            privacy-marker-transactions: true
            enable-gas-price: false
          - tag: '(basic && !privacy-precompile-disabled) || networks/typical::qbftmps'
            privacy-enhancements: false
            privacy-precompile: true
            privacy-marker-transactions: true
            enable-gas-price: false
          # privacy enhancements + privacy precompile/privacy marker transaction tests
          - tag: '(basic && !privacy-enhancements-disabled && !privacy-precompile-disabled) || privacy-enhancements || mandatory-recipients || privacy-precompile-enabled || basic-raft || (advanced && raft) || networks/typical::raft'
            privacy-enhancements: true
            privacy-precompile: true
            privacy-marker-transactions: true
            enable-gas-price: false
          - tag: '(basic && !privacy-enhancements-disabled && !privacy-precompile-disabled) || privacy-enhancements || mandatory-recipients || privacy-precompile-enabled || basic-istanbul || (advanced && istanbul) || networks/typical::istanbul'
            privacy-enhancements: true
            privacy-precompile: true
            privacy-marker-transactions: true
            enable-gas-price: false
          - tag: '(basic && !privacy-enhancements-disabled && !privacy-precompile-disabled) || privacy-enhancements || mandatory-recipients || privacy-precompile-enabled || basic-istanbul || (advanced && istanbul) || networks/typical::qbft'
            privacy-enhancements: true
            privacy-precompile: true
            privacy-marker-transactions: true
            enable-gas-price: false
          - tag: 'privacy-precompile-compatibility && networks/template::raft-4nodes'
            privacy-enhancements: false
            privacy-precompile: true
            privacy-marker-transactions: false # do not enable pmts as the test will do this on the necessary nodes
            enable-gas-price: false
          - tag: 'privacy-precompile-compatibility && networks/template::istanbul-4nodes'
            privacy-enhancements: false
            privacy-precompile: true
            privacy-marker-transactions: false # do not enable pmts as the test will do this on the necessary nodes
            enable-gas-price: false
          # tests for enabled gas-price
          - tag: 'gas-price-enabled || networks/typical::raft'
            privacy-enhancements: false
            privacy-precompile: false
            privacy-marker-transactions: false
            enable-gas-price: true
          - tag: 'gas-price-enabled || networks/typical::raft'
            privacy-enhancements: false
            privacy-precompile: true
            privacy-marker-transactions: true
            enable-gas-price: true
    runs-on: ubuntu-20.04
    steps:
      - name: 'Download docker image'
        uses: actions/cache@v2
        with:
          path: ${{ needs.docker-build.outputs.output_dir }}
          key: ${{ github.sha }}-extra
      - name: 'Prepare environment'
        id: setup
        run: |
          tar xfvz ${{ needs.docker-build.outputs.output_file }}
          docker load --input ${{ needs.docker-build.outputs.image_file }}
          tagKey=$(echo -n "${{ matrix.tag }}" | shasum --algorithm=256 | awk '{print $1}')
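          # tagKey (the sha256 of the tag expression) names the report file <tagKey>.json and the
          # testreport-<tagKey> artifact, letting the notify job match job names back to summaries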
mvnArg="" | |
dockerEnv="--network host -v /var/run/docker.sock:/var/run/docker.sock" | |
local_image="true" | |
if [ "${{ needs.condition.outputs.use_aws }}" == "true" ]; then | |
infraFolder="networks/_infra/aws-ec2" | |
infraProfile="${{ secrets.AWS_REGION }}" | |
mvnArg="-Dinfra.target=$infraFolder::$infraProfile" | |
dockerEnv="-e AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} -e AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} -e TF_VAR_vpc_id=${{ secrets.AWS_VPC_ID }} -e TF_VAR_public_subnet_id=${{ secrets.AWS_PUBLIC_SUBNET_ID }}" | |
echo "INFRA_FOLDER=$infraFolder" >> $GITHUB_ENV | |
echo "INFRA_PROFILE=$infraProfile" >> $GITHUB_ENV | |
local_image="false" | |
fi | |
dockerEnvFile=${{ runner.temp }}/env.list | |
# now we check if we should use the custom docker images in this repo | |
gitref_path="${{ github.ref }}" | |
gitref_path=${gitref_path/refs\/heads\//} # for refs/heads/my-branch | |
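          # e.g. refs/heads/dev-istanbul -> dev-istanbul (a hypothetical branch name), matching the dev-* check below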
          if [[ $gitref_path == dev-* ]]; then
            echo "${{ github.token }}" | docker login https://docker.pkg.github.com -u ${{ github.repository_owner }} --password-stdin
            quorum_docker_image=$(echo "docker.pkg.github.com/${{ github.repository }}/quorum-$gitref_path:develop" | tr '[:upper:]' '[:lower:]')
            tessera_docker_image=$(echo "docker.pkg.github.com/${{ github.repository }}/tessera-$gitref_path:develop" | tr '[:upper:]' '[:lower:]')
            has_quorum_docker_image=$(docker pull $quorum_docker_image >/dev/null 2>&1; echo $?)
            has_tessera_docker_image=$(docker pull $tessera_docker_image >/dev/null 2>&1; echo $?)
            echo "$quorum_docker_image: $has_quorum_docker_image"
            echo "$tessera_docker_image: $has_tessera_docker_image"
            if [ $has_quorum_docker_image -eq 0 ]; then
              echo "::warning ::Using $quorum_docker_image"
              echo "TF_VAR_quorum_docker_image={name=\"$quorum_docker_image\", local=$local_image}" >> $dockerEnvFile
              docker pull quorumengineering/quorum:develop
              docker pull quorumengineering/quorum:latest
            fi
            if [ $has_tessera_docker_image -eq 0 ]; then
              echo "::warning ::Using $tessera_docker_image"
              echo "TF_VAR_tessera_docker_image={name=\"$tessera_docker_image\", local=$local_image}" >> $dockerEnvFile
              docker pull quorumengineering/tessera:develop
              docker pull quorumengineering/tessera:latest
            fi
            if [ $has_quorum_docker_image -eq 0 ] || [ $has_tessera_docker_image -eq 0 ]; then
              echo "TF_VAR_docker_registry=[{name=\"docker.pkg.github.com\", username=\"${{ github.repository_owner }}\", password=\"${{ github.token }}\"}]" >> $dockerEnvFile
            fi
          fi
          echo "TF_VAR_privacy_enhancements={block=0, enabled=${{ matrix.privacy-enhancements }}}" >> $dockerEnvFile
          echo "TF_VAR_privacy_precompile={block=0, enabled=${{ matrix.privacy-precompile }}}" >> $dockerEnvFile
          echo "TF_VAR_privacy_marker_transactions=${{ matrix.privacy-marker-transactions }}" >> $dockerEnvFile
          echo "TF_VAR_enable_gas_price={block=0, enabled=${{ matrix.enable-gas-price }}}" >> $dockerEnvFile
echo "::set-output name=tag::$tagKey" | |
echo "::set-output name=mvnArg::$mvnArg" | |
echo "::set-output name=dockerEnv::$dockerEnv" | |
echo "::set-output name=outputDir::${{ runner.temp }}" | |
echo "::set-output name=dockerEnvFile::$dockerEnvFile" | |
- name: 'Run tests using ${{ needs.condition.outputs.infra }}' | |
run: | | |
# we don't remove the container after run as we need to clean up the infra if used | |
docker run \ | |
--name acctests-run ${{ steps.setup.outputs.dockerEnv }} \ | |
-v ${{ steps.setup.outputs.outputDir }}:${{ steps.setup.outputs.outputDir }} \ | |
--env-file ${{ steps.setup.outputs.dockerEnvFile }} \ | |
${{ needs.docker-build.outputs.image_name }} test \ | |
-Pauto \ | |
-Dtags="${{ matrix.tag }}" ${{ steps.setup.outputs.mvnArg }} \ | |
-Dauto.outputDir=${{ steps.setup.outputs.outputDir }} \ | |
-Dauto.jobid=${{ steps.setup.outputs.tag }} | |
# -PgaugeFailSafe \ | |
- name: 'Failure info' | |
if: ${{ failure() }} | |
run: | | |
echo "Docker container info" | |
set -x | |
docker images | |
docker ps -a | |
set +x | |
IFS=$'\n' # set internal field separator so we can iterate over docker ps output | |
for CONTAINER in $(docker ps -a --format {{.Names}}) | |
do | |
echo "writing logs for $CONTAINER to ${CONTAINER}.log" | |
docker logs $CONTAINER > ${{ steps.setup.outputs.outputDir }}/${CONTAINER}.log 2>&1 | |
echo "writing inspect output for $CONTAINER to ${CONTAINER}-inspect.json" | |
docker container inspect $CONTAINER > ${{ steps.setup.outputs.outputDir }}/${CONTAINER}-inspect.json 2>&1 | |
done | |
- name: 'Read test report' | |
if: always() | |
run: | | |
echo "::group::failures" | |
if [ -r ${{ steps.setup.outputs.outputDir}}/failures.txt ]; | |
then | |
failuresRaw="$(cat ${{ steps.setup.outputs.outputDir }}/failures.txt | jq -r '.[] | @base64')" | |
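            # failures.txt is expected to be a JSON array like [{"file":"...","line":1,"col":1,"message":"..."}]
            # (field names per the jq lookups below; the values shown are illustrative)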
            SAVEIFS=$IFS # Save current IFS
            IFS=$'\n' # Change IFS to new line
            failures=($failuresRaw) # split to array
            IFS=$SAVEIFS # Restore IFS
            for (( i=0; i<${#failures[@]}; i++ ))
            do
              row=${failures[$i]}
              _jq() {
                echo ${row} | base64 --decode | jq -r ${1}
              }
              echo "$(_jq '.file'): $(_jq '.message')"
              echo "::error file=$(_jq '.file'),line=$(_jq '.line'),col=$(_jq '.col')::$(_jq '.message')"
            done
          fi
          echo "::endgroup::"
          echo "::group::skipped"
          if [ -r ${{ steps.setup.outputs.outputDir }}/skipped.txt ];
          then
            skippedRaw="$(cat ${{ steps.setup.outputs.outputDir }}/skipped.txt | jq -r '.[] | @base64')"
            SAVEIFS=$IFS # Save current IFS
            IFS=$'\n' # Change IFS to new line
            skipped=($skippedRaw) # split to array
            IFS=$SAVEIFS # Restore IFS
            for (( i=0; i<${#skipped[@]}; i++ ))
            do
              row=${skipped[$i]}
              _jq() {
                echo ${row} | base64 --decode | jq -r ${1}
              }
              echo "$(_jq '.message')"
              echo "::warning ::$(_jq '.message')"
            done
          fi
          echo "::endgroup::"
          if [ -r ${{ steps.setup.outputs.outputDir }}/summary.txt ];
          then
            cat ${{ steps.setup.outputs.outputDir }}/summary.txt;
          fi
      - name: 'Upload test report'
        if: always()
        uses: actions/upload-artifact@v2
        with:
          name: testreport-${{ steps.setup.outputs.tag }}
          path: ${{ steps.setup.outputs.outputDir }}/*.* # only files, not directories
      - name: 'Destroy infrastructure resources if ever created'
        if: always() && needs.condition.outputs.use_aws == 'true'
        # we don't care about containers running on the remote VM
        run: |
          docker commit acctests-run quorumengineering/acctests:after
          docker run --rm ${{ steps.setup.outputs.dockerEnv }} \
            -v ${{ steps.setup.outputs.outputDir }}:${{ steps.setup.outputs.outputDir }} \
            quorumengineering/acctests:after \
            exec:exec@infra.terraform-destroy \
            -Pauto \
            -Dinfra.folder="${{ env.INFRA_FOLDER }}" \
            -Dinfra.profile="${{ env.INFRA_PROFILE }}"
  notify:
    if: always() && github.event_name != 'pull_request' && needs.condition.outputs.should_run == 'true'
    name: Notify
    needs:
      - condition
      - docker-build
      - run
    runs-on: ubuntu-20.04
    steps:
      - name: 'Setup metadata'
        id: setup
        run: |
          gitref_path="${{ github.ref }}"
          gitref_path=${gitref_path/refs\/heads/tree} # for refs/heads/my-branch
          gitref_path=${gitref_path/refs\/tags/tree} # for refs/tags/v1.0.0
          gitref_path=${gitref_path#refs\/} # for refs/pull/123/merge
          gitref_path=${gitref_path%/merge} # for refs/pull/123/merge
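          # e.g. refs/heads/my-branch -> tree/my-branch, refs/tags/v1.0.0 -> tree/v1.0.0, refs/pull/123/merge -> pull/123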
echo "::set-output name=gitref-path::$gitref_path" | |
- name: 'Download test reports' | |
uses: actions/download-artifact@v2 | |
- name: 'Aggregate test reports' | |
id: report | |
# all test reports are now downloaded and folders are prefixed with 'testreport' | |
# each test folder contains a JSON file: <sha256(tag)>.json | |
# this step will output 2 jsons: | |
# - an aggregated summary | |
# - a map: sha256(tag) => summary | |
run: | | |
# combine all json files into one | |
tree | |
all="all.json" | |
for f in `ls -d testreport-*`; do | |
hash=${f#testreport-} | |
cat "$f/$hash.json" | jq --arg h "$hash" '{ ($h) : . }' >> $all | |
done | |
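          # all.json now holds one object per report, e.g. {"<sha256(tag)>":{"passed":10,"failed":1,"skipped":2}}
          # (field names as consumed by the Slack step below; the counts are illustrative)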
          # combine into a JSON array and format output
          reports=$(cat $all | jq -c -s add | jq -sR 'rtrimstr("\n")')
          reports=${reports#\"}
          reports=${reports%\"}
          # sum up reports
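          # e.g. {"a":{"passed":1,"failed":0}} and {"b":{"passed":2,"failed":1}} reduce to {"passed":3,"failed":1}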
          summary=$(cat $all | jq -c -s 'reduce (.[] | to_entries[] | .value | to_entries[]) as {$key,$value} ({}; .[$key] += $value)' | jq -sR 'rtrimstr("\n")')
          summary=${summary#\"}
          summary=${summary%\"}
          echo "::set-output name=reports::$reports"
          echo "::set-output name=summary::$summary"
      - name: 'Prepare Slack message with full info'
        id: status
        uses: actions/github-script@0.8.0
        with:
          script: |
            // need this utility function to hash the tag so we can read test report
            var sha256=function a(b){function c(a,b){return a>>>b|a<<32-b}for(var d,e,f=Math.pow,g=f(2,32),h="length",i="",j=[],k=8*b[h],l=a.h=a.h||[],m=a.k=a.k||[],n=m[h],o={},p=2;64>n;p++)if(!o[p]){for(d=0;313>d;d+=p)o[d]=p;l[n]=f(p,.5)*g|0,m[n++]=f(p,1/3)*g|0}for(b+="\x80";b[h]%64-56;)b+="\x00";for(d=0;d<b[h];d++){if(e=b.charCodeAt(d),e>>8)return;j[d>>2]|=e<<(3-d)%4*8}for(j[j[h]]=k/g|0,j[j[h]]=k,e=0;e<j[h];){var q=j.slice(e,e+=16),r=l;for(l=l.slice(0,8),d=0;64>d;d++){var s=q[d-15],t=q[d-2],u=l[0],v=l[4],w=l[7]+(c(v,6)^c(v,11)^c(v,25))+(v&l[5]^~v&l[6])+m[d]+(q[d]=16>d?q[d]:q[d-16]+(c(s,7)^c(s,18)^s>>>3)+q[d-7]+(c(t,17)^c(t,19)^t>>>10)|0),x=(c(u,2)^c(u,13)^c(u,22))+(u&l[1]^u&l[2]^l[1]&l[2]);l=[w+x|0].concat(l),l[4]=l[4]+w|0}for(d=0;8>d;d++)l[d]=l[d]+r[d]|0}for(d=0;8>d;d++)for(e=3;e+1;e--){var y=l[d]>>8*e&255;i+=(16>y?0:"")+y.toString(16)}return i};
            var summary = JSON.parse('${{ steps.report.outputs.summary }}')
            var reports = JSON.parse('${{ steps.report.outputs.reports }}')
            var gitref_path = "${{ steps.setup.outputs.gitref-path }}"
            ////////////////////////////////////
            // retrieve workflow run data
            ////////////////////////////////////
            console.log("get workflow run")
            const wf_run = await github.actions.getWorkflowRun({
              owner: context.repo.owner,
              repo: context.repo.repo,
              run_id: ${{ github.run_id }}
            })
            console.log(wf_run.data)
            console.log("get jobs for workflow run:", wf_run.data.jobs_url)
            const jobs_response = await github.request(wf_run.data.jobs_url)
            console.log("Got jobs_response")
            ////////////////////////////////////
            // build slack notification message
            ////////////////////////////////////
            // some utility functions
            var date_diff_func = function(start, end) {
              var duration = end - start
              // format the duration
              var delta = duration / 1000
              var days = Math.floor(delta / 86400)
              delta -= days * 86400
              var hours = Math.floor(delta / 3600) % 24
              delta -= hours * 3600
              var minutes = Math.floor(delta / 60) % 60
              delta -= minutes * 60
              var seconds = Math.floor(delta % 60)
              var format_func = function(v, text, check) {
                if (v <= 0 && check) {
                  return ""
                } else {
                  return v + text
                }
              }
              return format_func(days, "d", true) + format_func(hours, "h", true) + format_func(minutes, "m", true) + format_func(seconds, "s", false)
            }
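            // e.g. a 3,723,000 ms span renders as "1h2m3s"; leading zero-value units are dropped, seconds are always shown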
            var status_icon_func = function(s) {
              switch (s) {
                case "w_success":
                  return ":white_check_mark:"
                case "w_failure":
                  return ":no_entry:"
                case "w_cancelled":
                  return ":warning:"
                case "success":
                  return "\u2713"
                case "failure":
                  return "\u2717"
                default:
                  return "\u20e0"
              }
            }
            const commit = "${{ github.sha }}".substr(0, 6)
            console.log("Preparing message for :", commit)
            var pr = ""
            for (p of wf_run.data.pull_requests) {
              console.log("Fetching pr info from ", p.url)
              const pull_response = await github.request(p.url)
              pr += `,<${pull_response.data.html_url}|PR #${p.number}>`
            }
            if (pr != "") {
              pr = `for ${pr.substr(1)}`
            }
            console.log("Built pr message :", pr)
            // build the message
            var job_blocks = []
            var is_wf_success = true
            var is_wf_failure = false
            console.log("Iterating jobs ", jobs_response.data.jobs.length)
            for (j of jobs_response.data.jobs) {
              console.log(j.name, ":", j.status, j.conclusion, j.started_at, j.completed_at)
              // ignore the current job running this script
              if (j.status != "completed") {
                continue
              }
              if (j.conclusion != "success") {
                is_wf_success = false
              }
              if (j.conclusion == "failure") {
                is_wf_failure = true
              }
              // try to obtain the summary if available
              var tag = j.name.replace(/^[^(]+\((.+)\) \([^)]*\)$/, "$1") // take only the tag expression, i.e. the first parenthesised group of the job name
              var hash = sha256(tag)
              console.log("Tag: " + tag + ", Hash: " + hash)
              console.log("Looking in", reports)
              var job_summary = reports[hash]
              var job_summary_text = ""
              if (job_summary != undefined) {
                job_summary_text = `:sunny: ${job_summary.passed} :thunder_cloud_and_rain: ${job_summary.failed} :umbrella_with_rain_drops: ${job_summary.skipped} `
              }
              if (j.conclusion == "failure") {
                job_blocks.push({
                  type: "section",
                  text: {
                    type: "mrkdwn",
                    text: `${status_icon_func(j.conclusion)} <${j.html_url}|${j.name}>\n${job_summary_text}:hourglass: ${date_diff_func(new Date(j.started_at), new Date(j.completed_at))}`
                  }
                })
              }
            }
            console.log("Built job_blocks :", job_blocks)
            var workflow_status = "w_cancelled"
            if (is_wf_success) {
              workflow_status = "w_success"
            } else if (is_wf_failure) {
              workflow_status = "w_failure"
            }
            var context_elements = [
              {
                "type": "mrkdwn",
                "text": "*Repo:* <https://github.com/${{ github.repository }}|${{ github.repository }}>"
              },
              {
                "type": "mrkdwn",
                "text": `*Branch:* <https://github.com/${{ github.repository }}/${gitref_path}|${{ github.ref }}>`
              },
              {
                "type": "mrkdwn",
                "text": `*Event:* ${wf_run.data.event}`
              }
            ]
            if (wf_run.data.event != 'schedule') {
              context_elements.push(
                {
                  "type": "mrkdwn",
                  "text": `*Commit:* <https://github.com/${{ github.repository }}/commit/${wf_run.data.head_commit.id}|${wf_run.data.head_commit.id.substr(0, 8)}>`
                },
                {
                  "type": "mrkdwn",
                  "text": `*Author:* ${wf_run.data.head_commit.author.name}`
                }
              )
            }
            var summary_text = `:zap: ${job_blocks.length} :sunny: ${summary.passed} :thunder_cloud_and_rain: ${summary.failed} :umbrella_with_rain_drops: ${summary.skipped} :stopwatch: ${date_diff_func(new Date(wf_run.data.created_at), new Date(wf_run.data.updated_at))}`
            var header_blocks = [
              {
                type: "section",
                text: {
                  type: "mrkdwn",
                  text: `${status_icon_func(workflow_status)} *${{ github.workflow }}* (ran on ${{ needs.condition.outputs.infra }}) <${wf_run.data.html_url}|#${{ github.run_number }}>\n${summary_text}`
                }
              },
              {
                type: "context",
                elements: context_elements,
              },
              {
                type: "divider"
              }
            ]
            var slack_msg = {
              blocks: [].concat(header_blocks, job_blocks)
            }
            console.log("Built slack_msg : ", slack_msg)
            return slack_msg
      - name: 'Prepare Slack message with partial info'
        id: short_status
        if: failure()
        uses: actions/github-script@0.8.0
        with:
          script: |
            ////////////////////////////////////
            // retrieve workflow run data
            ////////////////////////////////////
            console.log("retrieve workflow run data")
            const wf_run = await github.actions.getWorkflowRun({
              owner: context.repo.owner,
              repo: context.repo.repo,
              run_id: ${{ github.run_id }}
            })
            var date_diff_func = function(start, end) {
              var duration = end - start
              // format the duration
              var delta = duration / 1000
              var days = Math.floor(delta / 86400)
              delta -= days * 86400
              var hours = Math.floor(delta / 3600) % 24
              delta -= hours * 3600
              var minutes = Math.floor(delta / 60) % 60
              delta -= minutes * 60
              var seconds = Math.floor(delta % 60)
              var format_func = function(v, text, check) {
                if (v <= 0 && check) {
                  return ""
                } else {
                  return v + text
                }
              }
              return format_func(days, "d", true) + format_func(hours, "h", true) + format_func(minutes, "m", true) + format_func(seconds, "s", false)
            }
            var slack_msg = {
              blocks: [
                {
                  type: "section",
                  text: {
                    type: "mrkdwn",
                    text: `:skull_and_crossbones: *${{ github.workflow }}* <${wf_run.data.html_url}|#${{ github.run_number }}>\n:stopwatch: ${date_diff_func(new Date(wf_run.data.created_at), new Date(wf_run.data.updated_at))}`
                  }
                }
              ]
            }
            return slack_msg
      - name: 'Send to Slack'
        if: always()
        run: |
          cat <<JSON > long_message.json
          ${{ steps.status.outputs.result }}
          JSON
          cat <<JSON > short_message.json
          ${{ steps.short_status.outputs.result }}
          JSON
          _post() {
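            # --fail (added below) makes curl exit non-zero on an HTTP error response,
            # so the short-message fallback in the || chain actually triggers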
            curl --fail -X POST ${{ secrets.SLACK_WEBHOOK_URL }} -H "Content-type: application/json" --data "@${1}"
          }
          _post "long_message.json" || _post "short_message.json"