chore: Build the acir test vectors as part of CI. (#3447)
As title. This also modifies the `./rebuild.sh` script in noir to parallelise test compilation. ~11m reduced to <1m (on mainframe).
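The parallelisation itself is the plain background-job pattern visible in the `./rebuild.sh` diff further down: fan each test out as a background job, record the PIDs, wait on each one, and remember any failure so the script can exit non-zero at the end. In outline (a trimmed sketch of that part of the script, not the whole thing):

```bash
# Trimmed sketch of the parallel section of rebuild.sh.
pids=()
for dir in "${dirs_to_process[@]}"; do
  process_dir "$dir" "$current_dir" &   # compile and package one test in the background
  pids+=($!)
done

for pid in "${pids[@]}"; do
  wait $pid || exit_status=$?           # keep the last non-zero status, if any
done

if [ -n "${exit_status:-}" ]; then
  exit $exit_status
fi
```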
1 parent 5927103 · commit 1a2d1f8
Showing 8 changed files with 119 additions and 67 deletions.
@@ -1,13 +1,15 @@
FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/barretenberg-x86_64-linux-clang-assert
FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/barretenberg-x86_64-linux-clang-sol
FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/noir-acir-tests as noir-acir-tests

FROM node:18-alpine
RUN apk update && apk add git bash curl jq
COPY --from=0 /usr/src/barretenberg/cpp/build /usr/src/barretenberg/cpp/build
COPY --from=1 /usr/src/barretenberg/sol/src/ultra/BaseUltraVerifier.sol /usr/src/barretenberg/sol/src/ultra/BaseUltraVerifier.sol
COPY --from=noir-acir-tests /usr/src/noir/tooling/nargo_cli/tests /usr/src/noir/tooling/nargo_cli/tests
COPY --from=ghcr.io/foundry-rs/foundry:latest /usr/local/bin/anvil /usr/local/bin/anvil
WORKDIR /usr/src/barretenberg/acir_tests
COPY . .
# Run every acir test through a solidity verifier".
# Run every acir test through a solidity verifier.
RUN (cd sol-test && yarn)
RUN PARALLEL=1 FLOW=sol ./run_acir_tests.sh
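The final two RUN steps are the ones that exercise every acir test against the Solidity verifier. A roughly equivalent local invocation would look like the sketch below; it assumes anvil, jq, yarn and a barretenberg build are already in place, which the image above provides via its COPY steps.

```bash
# Sketch of running the sol flow outside Docker (assumes the local toolchain is set up).
cd barretenberg/acir_tests
(cd sol-test && yarn)                     # install the Node dependencies for sol-test
PARALLEL=1 FLOW=sol ./run_acir_tests.sh
```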
@@ -0,0 +1,10 @@
# This is a slightly odd location for this Dockerfile, as ostensibly it should belong next to the tests themselves.
# However, the noir subrepo has no concept of the aztec build pipeline, so the Dockerfile wouldn't make sense there.
# So, it lives here.
# This chains off the nargo build, and creates a container with a compiled set of acir tests.
FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/noir
RUN apk add bash jq
ENV PATH="/usr/src/noir/target/release:${PATH}"
WORKDIR /usr/src/noir/tooling/nargo_cli/tests
COPY . .
RUN ./rebuild.sh
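Since this Dockerfile chains off the nargo build and its build context is the nargo_cli tests directory (per the WORKDIR and COPY . . above), a local build would look roughly like the following. The Dockerfile path and image tag here are placeholders; the real names come from the aztec build pipeline, which this diff doesn't show.

```bash
# Hypothetical local build; 'Dockerfile.noir_acir_tests' and the tag are placeholder names.
docker build \
  -f barretenberg/acir_tests/Dockerfile.noir_acir_tests \
  -t noir-acir-tests \
  noir/tooling/nargo_cli/tests   # build context = the nargo tests dir that COPY . . expects
```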
@@ -1,19 +1,8 @@
#!/bin/bash
set -eu

# Pull down the test vectors from the noir repo, if we don't have the folder already.
TEST_SRC=${TEST_SRC:-../../noir/tooling/nargo_cli/tests/acir_artifacts}

if [ ! -d acir_tests ]; then
if [ -n "${TEST_SRC:-}" ]; then
cp -R $TEST_SRC acir_tests
else
rm -rf noir
git clone -b $BRANCH --filter=blob:none --no-checkout https://github.com/noir-lang/noir.git
cd noir
git sparse-checkout init --cone
git sparse-checkout set test_programs/acir_artifacts
git checkout
cd ..
mv noir/test_programs/acir_artifacts acir_tests
rm -rf noir
fi
fi
cp -R $TEST_SRC acir_tests
fi
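With the sparse-clone fallback removed, the script now simply copies from TEST_SRC, which defaults to the relative noir checkout path. Pointing it at a different artifact location is just a matter of overriding the variable; the script file name below is a placeholder, since it isn't shown in this diff.

```bash
# Hypothetical invocation: 'clone_test_vectors.sh' stands in for this script's actual name.
# TEST_SRC can point at any directory of prebuilt acir_artifacts.
TEST_SRC=/path/to/noir/tooling/nargo_cli/tests/acir_artifacts ./clone_test_vectors.sh
```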
@@ -1,56 +1,72 @@
#!/bin/bash
set -e

excluded_dirs=("workspace" "workspace_default_member")
process_dir() {
local dir=$1
local current_dir=$2
local dir_name=$(basename "$dir")

if [[ ! -d "$current_dir/acir_artifacts/$dir_name" ]]; then
mkdir -p $current_dir/acir_artifacts/$dir_name
fi

cd $dir
if [ -d ./target/ ]; then
rm -r ./target/
fi
nargo compile && nargo execute witness

if [ -f ./target/witness.tr ]; then
mv ./target/witness.tr ./target/witness.gz
fi

if [ -f ./target/${dir_name}.json ]; then
jq -r '.bytecode' ./target/${dir_name}.json | base64 -d > ./target/acir.gz
fi

rm ./target/${dir_name}.json

if [ -d "$current_dir/acir_artifacts/$dir_name/target" ]; then
rm -r "$current_dir/acir_artifacts/$dir_name/target"
fi
mkdir $current_dir/acir_artifacts/$dir_name/target

mv ./target/*.gz $current_dir/acir_artifacts/$dir_name/target/

cd $current_dir
}

export -f process_dir

excluded_dirs=("workspace" "workspace_default_member")
current_dir=$(pwd)
base_path="$current_dir/execution_success"

# Clear the acir_artifacts directory of any existing artifacts
rm -rf $current_dir/acir_artifacts
mkdir -p $current_dir/acir_artifacts

# Loop over every directory
# Gather directories to process.
dirs_to_process=()
for dir in $base_path/*; do
if [[ ! -d $dir ]]; then
continue
fi

dir_name=$(basename "$dir")

if [[ ! " ${excluded_dirs[@]} " =~ " ${dir_name} " ]]; then
if [[ ! -d "$current_dir/acir_artifacts/$dir_name" ]]; then
mkdir -p $current_dir/acir_artifacts/$dir_name
fi

cd $dir
if [ -d ./target/ ]; then
rm -r ./target/
fi
nargo compile && nargo execute witness

# Rename witness.tr to witness.gz
if [ -f ./target/witness.tr ]; then
mv ./target/witness.tr ./target/witness.gz
fi

# Extract bytecode field from JSON, base64 decode it, and save it to the target directory
if [ -f ./target/${dir_name}.json ]; then
jq -r '.bytecode' ./target/${dir_name}.json | base64 -d > ./target/acir.gz
fi

# Delete the JSON file after extracting bytecode field
rm ./target/${dir_name}.json

# Clear the target directory in acir_artifacts
if [ -d "$current_dir/acir_artifacts/$dir_name/target" ]; then
rm -r "$current_dir/acir_artifacts/$dir_name/target"
fi
mkdir $current_dir/acir_artifacts/$dir_name/target

# Move the artifacts from the target directory to the corresponding directory in acir_artifacts
mv ./target/*.gz $current_dir/acir_artifacts/$dir_name/target/

cd $base_path
fi
if [[ ! -d $dir ]] || [[ " ${excluded_dirs[@]} " =~ " $(basename "$dir") " ]]; then
continue
fi
dirs_to_process+=("$dir")
done

# Process each directory in parallel
pids=()
for dir in "${dirs_to_process[@]}"; do
process_dir "$dir" "$current_dir" &
pids+=($!)
done

# Check the exit status of each background job.
for pid in "${pids[@]}"; do
wait $pid || exit_status=$?
done

# Exit with a failure status if any job failed.
if [ ! -z "$exit_status" ]; then
exit $exit_status
fi
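To make the per-test artifact step above concrete: for each program, nargo produces a JSON artifact plus a witness file, and the script extracts the base64-encoded bytecode field from the JSON and writes the decoded bytes to acir.gz next to the renamed witness. A standalone sketch for a single test, where the name my_test is a placeholder for any package under execution_success:

```bash
# Sketch: build one program's acir_artifacts entry by hand ('my_test' is illustrative).
cd execution_success/my_test
rm -rf ./target
nargo compile && nargo execute witness          # emits ./target/my_test.json and ./target/witness.tr
mv ./target/witness.tr ./target/witness.gz      # witness is stored as witness.gz
jq -r '.bytecode' ./target/my_test.json | base64 -d > ./target/acir.gz
mkdir -p ../../acir_artifacts/my_test/target
mv ./target/*.gz ../../acir_artifacts/my_test/target/
```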