diff --git a/.github/spot-runner-action/dist/index.js b/.github/spot-runner-action/dist/index.js index b34f8c07dc9..bca6f38f5f8 100644 --- a/.github/spot-runner-action/dist/index.js +++ b/.github/spot-runner-action/dist/index.js @@ -127,6 +127,9 @@ class Ec2Instance { this.client = new aws_sdk_1.default.EC2({ credentials: this.credentials, region: this.config.awsRegion, + maxRetries: 3, + // base 10 seconds for the exponential backoff, up to 3 times + retryDelayOptions: { base: 10000 }, }); this.tags = this.getTags(); } @@ -138,6 +141,9 @@ class Ec2Instance { this.client = new aws_sdk_1.default.EC2({ credentials: credentials, region: this.config.awsRegion, + maxRetries: 3, + // base 10 seconds for the exponential backoff, up to 3 times + retryDelayOptions: { base: 10000 } }); } return this.client; @@ -748,8 +754,10 @@ function requestAndWaitForSpot(config) { // wait 10 seconds yield new Promise((r) => setTimeout(r, 5000 * Math.pow(2, backoff))); backoff += 1; - core.info("Polling to see if we somehow have an instance up"); - instanceId = yield ((_a = ec2Client.getInstancesForTags("running")[0]) === null || _a === void 0 ? void 0 : _a.instanceId); + if (config.githubActionRunnerConcurrency > 0) { + core.info("Polling to see if we somehow have an instance up"); + instanceId = yield ((_a = ec2Client.getInstancesForTags("running")[0]) === null || _a === void 0 ? void 0 : _a.instanceId); + } } if (instanceId) { core.info("Successfully requested/found instance with ID " + instanceId); diff --git a/.github/spot-runner-action/src/ec2.ts b/.github/spot-runner-action/src/ec2.ts index 83211f7bba4..3879b1dea1d 100644 --- a/.github/spot-runner-action/src/ec2.ts +++ b/.github/spot-runner-action/src/ec2.ts @@ -39,6 +39,9 @@ export class Ec2Instance { this.client = new AWS.EC2({ credentials: this.credentials, region: this.config.awsRegion, + maxRetries: 3, + // base 10 seconds for the exponential backoff, up to 3 times + retryDelayOptions: { base: 10000 }, }); this.tags = this.getTags(); @@ -51,6 +54,9 @@ export class Ec2Instance { this.client = new AWS.EC2({ credentials: credentials, region: this.config.awsRegion, + maxRetries: 3, + // base 10 seconds for the exponential backoff, up to 3 times + retryDelayOptions: {base: 10000} }); } return this.client; diff --git a/.github/spot-runner-action/src/main.ts b/.github/spot-runner-action/src/main.ts index 7b7173f5795..00cbe457dd0 100644 --- a/.github/spot-runner-action/src/main.ts +++ b/.github/spot-runner-action/src/main.ts @@ -88,8 +88,10 @@ async function requestAndWaitForSpot(config: ActionConfig): Promise { // wait 10 seconds await new Promise((r) => setTimeout(r, 5000 * 2 ** backoff)); backoff += 1; - core.info("Polling to see if we somehow have an instance up"); - instanceId = await ec2Client.getInstancesForTags("running")[0]?.instanceId; + if (config.githubActionRunnerConcurrency > 0) { + core.info("Polling to see if we somehow have an instance up"); + instanceId = await ec2Client.getInstancesForTags("running")[0]?.instanceId; + } } if (instanceId) { core.info("Successfully requested/found instance with ID " + instanceId); diff --git a/.github/workflows/ci-arm.yml b/.github/workflows/ci-arm.yml index de68cd28210..1b1f5234ddb 100644 --- a/.github/workflows/ci-arm.yml +++ b/.github/workflows/ci-arm.yml @@ -1,7 +1,7 @@ name: CI (ARM) on: push: - branches: [disabled] # [master] + branches: [master] workflow_dispatch: inputs: {} concurrency: @@ -20,14 +20,10 @@ jobs: needs: setup runs-on: master-arm steps: - - { - uses: actions/checkout@v4, - with: { ref: 
"${{ github.event.pull_request.head.sha }}" }, - } + - uses: actions/checkout@v4 + with: { ref: "${{ github.event.pull_request.head.sha }}" } - uses: ./.github/ci-setup-action with: - dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}" - # must be globally unique for build x runner concurrency_key: build-master-arm # prepare images locally, tagged by commit hash - name: "Build E2E Image" @@ -39,13 +35,10 @@ jobs: needs: build runs-on: master-arm steps: - - { - uses: actions/checkout@v4, - with: { ref: "${{ github.event.pull_request.head.sha }}" }, - } + - uses: actions/checkout@v4 + with: { ref: "${{ github.event.pull_request.head.sha }}" } - uses: ./.github/ci-setup-action with: - dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}" concurrency_key: e2e-master-arm-e2e-tests - name: Test working-directory: ./yarn-project/end-to-end/ diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3aeab526ec8..184848f54f4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -32,6 +32,39 @@ jobs: username: ${{ github.event.pull_request.user.login || github.actor }} runner_type: builder-x86 secrets: inherit + # job just to alert that you *might* hit missing CI files, but if not you can continue + + ci-consistency: + runs-on: ubuntu-latest + # Only check PRs for consistency (not master) + if: ${{ github.event.pull_request.head.sha != '' }} + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + + - name: Checkout Merge Pipeline Files + uses: actions/checkout@v4 + with: + path: merge-commit-pipeline-files + sparse-checkout: | + .github/workflows/ci.yml + .github/workflows/setup-runner.yml + + - name: Ensure CI Consistency + if: ${{ github.event.pull_request.head.sha != '' }} + run: | + # Compare the checked-out CI configuration files with the reference files + if ! git diff --no-index .github/workflows/ci.yml merge-commit-pipeline-files/.github/workflows/ci.yml; then + echo "Error: ci.yml changes in master (or PR base). *Usually* you can continue just fine. If you hit trouble with missing files, please merge these changes. This is to alert about potential surprises from Github Action's merge behavior." + exit 1 + fi + if ! git diff --no-index .github/workflows/setup-runner.yml merge-commit-pipeline-files/.github/workflows/setup-runner.yml; then + echo "Error: setup-runner.yml changes in master (or PR base). *Usually* you can continue just fine. If you hit trouble with missing files, please merge these changes. This is to alert about potential surprises from Github Action's merge behavior." 
+ exit 1 + fi + build: needs: setup runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 @@ -73,7 +106,7 @@ jobs: timeout-minutes: 40 uses: ./.github/ensure-tester-with-images with: - runner_type: ${{ matrix.test == 'client-prover-integration' && '32core-tester-x86' || '8core-tester-x86' }} + runner_type: ${{ contains(matrix.test, 'prover') && '64core-tester-x86' || '8core-tester-x86' }} builder_type: builder-x86 # these are copied to the tester and expected by the earthly command below # if they fail to copy, it will try to build them on the tester and fail @@ -102,7 +135,7 @@ jobs: uses: ./.github/ensure-tester-with-images timeout-minutes: 40 with: - runner_type: 16core-tester-x86 + runner_type: ${{ contains(matrix.test, 'prover') && '64core-tester-x86' || '16core-tester-x86' }} builder_type: builder-x86 # these are copied to the tester and expected by the earthly command below # if they fail to copy, it will try to build them on the tester and fail @@ -208,9 +241,21 @@ jobs: - uses: ./.github/ci-setup-action with: concurrency_key: noir-x86 - - name: "Test Noir JS packages" + - name: "Test Nargo" run: earthly-ci --no-output ./noir+test + noir-examples: + needs: setup + runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 + steps: + - uses: actions/checkout@v4 + with: { ref: "${{ env.GIT_COMMIT }}" } + - uses: ./.github/ci-setup-action + with: + concurrency_key: noir-examples-x86 + - name: "Test Noir examples" + run: earthly-ci --no-output ./noir+examples + noir-packages-test: needs: setup runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 @@ -357,7 +402,7 @@ jobs: concurrency_key: docs-preview-x86 - name: "Docs Preview" timeout-minutes: 40 - run: earthly-ci --no-output ./docs/+deploy-preview --PR=${{ github.event.number }} --AZTEC_BOT_COMMENTER_GITHUB_TOKEN=${{ secrets.AZTEC_BOT_GITHUB_TOKEN }} --NETLIFY_AUTH_TOKEN=${{ secrets.NETLIFY_AUTH_TOKEN }} --NETLIFY_SITE_ID=${{ secrets.NETLIFY_SITE_ID }} + run: earthly-ci --no-output ./docs/+deploy-preview --ENV=staging --PR=${{ github.event.number }} --AZTEC_BOT_COMMENTER_GITHUB_TOKEN=${{ secrets.AZTEC_BOT_GITHUB_TOKEN }} --NETLIFY_AUTH_TOKEN=${{ secrets.NETLIFY_AUTH_TOKEN }} --NETLIFY_SITE_ID=${{ secrets.NETLIFY_SITE_ID }} bb-bench: runs-on: ubuntu-latest @@ -420,20 +465,19 @@ jobs: merge-check: runs-on: ubuntu-latest needs: - [ - e2e, - bb-native-tests, - bb-bench, - yarn-project-formatting, - yarn-project-test, - prover-client-test, - bb-js-test, - barretenberg-acir-tests-bb-js, - barretenberg-acir-tests-bb, - barretenberg-acir-tests-sol, - noir-test, - noir-packages-test, - ] + - setup + - e2e + - bb-native-tests + - bb-bench + - yarn-project-formatting + - yarn-project-test + - prover-client-test + - bb-js-test + - barretenberg-acir-tests-bb-js + - barretenberg-acir-tests-bb + - barretenberg-acir-tests-sol + - noir-test + - noir-packages-test if: always() steps: - run: | diff --git a/.github/workflows/setup-runner.yml b/.github/workflows/setup-runner.yml index de6a0d6e893..3434e3432b3 100644 --- a/.github/workflows/setup-runner.yml +++ b/.github/workflows/setup-runner.yml @@ -53,30 +53,6 @@ jobs: with: ref: ${{ github.event.pull_request.head.sha }} - - name: Checkout Merge Pipeline Files - uses: actions/checkout@v4 - # Only check PRs for consistency (not master) - if: ${{ github.event.pull_request.head.sha != '' }} - with: - path: merge-commit-pipeline-files - sparse-checkout: | - .github/workflows/ci.yml - .github/workflows/setup-runner.yml - - - name: Ensure CI Consistency - # 
Only check PRs for consistency (not master) - if: ${{ github.event.pull_request.head.sha != '' }} - run: | - # Compare the checked-out CI configuration files with the reference files - if ! git diff --no-index .github/workflows/ci.yml merge-commit-pipeline-files/.github/workflows/ci.yml; then - echo "Error: ci.yml changes in master (or PR base). Please merge these changes. This is to prevent surprises from Github Action's merge behavior." - exit 1 - fi - if ! git diff --no-index .github/workflows/setup-runner.yml merge-commit-pipeline-files/.github/workflows/setup-runner.yml; then - echo "Error: setup-runner.yml changes in master (or PR base). Please merge these changes. This is to prevent surprises from Github Action's merge behavior." - exit 1 - fi - - name: Start EC2 runner uses: ./.github/ensure-builder with: diff --git a/.gitmodules b/.gitmodules index 32f3fa66dd1..f643e34f4ca 100644 --- a/.gitmodules +++ b/.gitmodules @@ -12,4 +12,4 @@ url = https://github.com/Arachnid/solidity-stringutils [submodule "barretenberg/sol/lib/openzeppelin-contracts"] path = barretenberg/sol/lib/openzeppelin-contracts - url = https://github.com/OpenZeppelin/openzeppelin-contracts \ No newline at end of file + url = https://github.com/OpenZeppelin/openzeppelin-contracts diff --git a/avm-transpiler/Cargo.lock b/avm-transpiler/Cargo.lock index 14beeb02d67..af42b7a046a 100644 --- a/avm-transpiler/Cargo.lock +++ b/avm-transpiler/Cargo.lock @@ -311,6 +311,7 @@ dependencies = [ "acvm", "base64 0.21.7", "env_logger", + "flate2", "log", "noirc_driver", "noirc_errors", diff --git a/avm-transpiler/Cargo.toml b/avm-transpiler/Cargo.toml index 0e868d476bb..275af5ad673 100644 --- a/avm-transpiler/Cargo.toml +++ b/avm-transpiler/Cargo.toml @@ -19,4 +19,5 @@ regex = "1.10" env_logger = "0.11" log = "0.4" serde_json = "1.0" -serde = { version = "1.0.136", features = ["derive"]} +serde = { version = "1.0.136", features = ["derive"] } +flate2 = "1.0" diff --git a/avm-transpiler/src/transpile_contract.rs b/avm-transpiler/src/transpile_contract.rs index 1b2987403b0..1b449495baa 100644 --- a/avm-transpiler/src/transpile_contract.rs +++ b/avm-transpiler/src/transpile_contract.rs @@ -1,9 +1,10 @@ +use std::io::Read; + use base64::Engine; use log::info; use serde::{Deserialize, Serialize}; use acvm::acir::circuit::Program; -use noirc_errors::debug_info::DebugInfo; use noirc_errors::debug_info::ProgramDebugInfo; use crate::transpile::{brillig_to_avm, map_brillig_pcs_to_avm_pcs, patch_debug_info_pcs}; @@ -104,6 +105,21 @@ impl From for TranspiledContractArtifact { // Transpile to AVM let avm_bytecode = brillig_to_avm(brillig_bytecode, &brillig_pcs_to_avm_pcs); + // Gzip AVM bytecode. This has to be removed once we need to do bytecode verification. 
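+        // Note: flate2's read-side GzEncoder compresses lazily; reading from the
+        // encoder pulls plaintext from `&avm_bytecode[..]` and yields gzip bytes,
+        // so the `read_to_end` call below drives the whole compression in one pass.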
+ let mut compressed_avm_bytecode = Vec::new(); + let mut encoder = + flate2::read::GzEncoder::new(&avm_bytecode[..], flate2::Compression::best()); + let _ = encoder.read_to_end(&mut compressed_avm_bytecode); + + log::info!( + "{}::{}: compressed {} to {} bytes ({}% reduction)", + contract.name, + function.name, + avm_bytecode.len(), + compressed_avm_bytecode.len(), + 100 - (compressed_avm_bytecode.len() * 100 / avm_bytecode.len()) + ); + // Patch the debug infos with updated PCs let debug_infos = patch_debug_info_pcs( &function.debug_symbols.debug_infos, @@ -117,7 +133,7 @@ impl From for TranspiledContractArtifact { is_unconstrained: function.is_unconstrained, custom_attributes: function.custom_attributes, abi: function.abi, - bytecode: base64::prelude::BASE64_STANDARD.encode(avm_bytecode), + bytecode: base64::prelude::BASE64_STANDARD.encode(compressed_avm_bytecode), debug_symbols: ProgramDebugInfo { debug_infos }, }, )); diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index 1594ed7c6bc..99d7eb63fb1 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = 8cb6cf04f5812579546d31654910f49e507f01e2 - parent = 92384ce9d69889ed63a913f8a53d2a4b9cc672bf + commit = af5ae45393b9dba4c77626c5f9591d53897ae6f2 + parent = 1fa59637a0829208d382d1dded36df33f4d61582 method = merge cmdver = 0.4.6 diff --git a/barretenberg/acir_tests/gen_inner_proof_inputs.sh b/barretenberg/acir_tests/gen_inner_proof_inputs.sh index ea3a2ced8e0..38392d84d89 100755 --- a/barretenberg/acir_tests/gen_inner_proof_inputs.sh +++ b/barretenberg/acir_tests/gen_inner_proof_inputs.sh @@ -28,18 +28,18 @@ VFLAG=${VERBOSE:+-v} RFLAG=${RECURSIVE:+-r} echo "Write VK to file for assert_statement..." -$BIN write_vk $VFLAG -c $CRS_PATH -o +$BIN write_vk $VFLAG -c $CRS_PATH -o ./target/vk echo "Write VK as fields for recursion..." -$BIN vk_as_fields $VFLAG -c $CRS_PATH +$BIN vk_as_fields $VFLAG -c $CRS_PATH -k ./target/vk -o ./target/vk_fields.json echo "Generate proof to file..." [ -d "$PROOF_DIR" ] || mkdir $PWD/proofs [ -e "$PROOF_PATH" ] || touch $PROOF_PATH -$BIN prove $VFLAG -c $CRS_PATH -b ./target/program.json -o "./proofs/$PROOF_NAME" $RFLAG +$BIN prove $VFLAG -c $CRS_PATH -b ./target/program.json -o "./proofs/$PROOF_NAME" echo "Write proof as fields for recursion..." -$BIN proof_as_fields $VFLAG -c $CRS_PATH -p "./proofs/$PROOF_NAME" +$BIN proof_as_fields $VFLAG -c $CRS_PATH -p "./proofs/$PROOF_NAME" -k ./target/vk -o "./proofs/${PROOF_NAME}_fields.json" cat ./proofs/${PROOF_NAME}_fields.json echo diff --git a/barretenberg/acir_tests/gen_inner_proof_inputs_ultra_honk.sh b/barretenberg/acir_tests/gen_inner_proof_inputs_ultra_honk.sh new file mode 100755 index 00000000000..511a6e9f03a --- /dev/null +++ b/barretenberg/acir_tests/gen_inner_proof_inputs_ultra_honk.sh @@ -0,0 +1,45 @@ +#!/usr/bin/env bash +# Env var overrides: +# BIN: to specify a different binary to test with (e.g. bb.js or bb.js-dev). +set -eu + +BIN=${BIN:-../cpp/build/bin/bb} +CRS_PATH=~/.bb-crs +BRANCH=master +VERBOSE=${VERBOSE:-} +RECURSIVE=true +PROOF_NAME="proof_a" + +if [ -f $BIN ]; then + BIN=$(realpath $BIN) +else + BIN=$(realpath $(which $BIN)) +fi + +export BRANCH + +./clone_test_vectors.sh + +cd acir_tests/assert_statement_recursive + +PROOF_DIR=$PWD/proofs +PROOF_PATH=$PROOF_DIR/$PROOF_NAME +VFLAG=${VERBOSE:+-v} +RFLAG=${RECURSIVE:+-r} + +echo "Write VK to file for assert_statement..." 
+$BIN write_vk_ultra_honk $VFLAG -c $CRS_PATH -o ./target/vk + +echo "Write VK as fields for recursion..." +$BIN vk_as_fields_ultra_honk $VFLAG -c $CRS_PATH -k ./target/vk -o ./target/vk_fields.json + +echo "Generate proof to file..." +[ -d "$PROOF_DIR" ] || mkdir $PWD/proofs +[ -e "$PROOF_PATH" ] || touch $PROOF_PATH +$BIN prove_ultra_honk $VFLAG -c $CRS_PATH -b ./target/program.json -o "./proofs/$PROOF_NAME" + +echo "Write proof as fields for recursion..." +$BIN proof_as_fields_honk $VFLAG -c $CRS_PATH -p "./proofs/$PROOF_NAME" -o "./proofs/${PROOF_NAME}_fields.json" + +cat ./proofs/${PROOF_NAME}_fields.json +echo diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index 9b1df668943..f8308b5e7c0 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -678,6 +678,62 @@ template void write_vk_honk(const std::string& bytecodePa vinfo("vk written to: ", outputPath); } } + +/** + * @brief Outputs proof as vector of field elements in readable format. + * + * Communication: + * - stdout: The proof as a list of field elements is written to stdout as a string + * - Filesystem: The proof as a list of field elements is written to the path specified by outputPath + * + * + * @param proof_path Path to the file containing the serialized proof + * @param output_path Path to write the proof to + */ +void proof_as_fields_honk(const std::string& proof_path, const std::string& output_path) +{ + auto proof = from_buffer>(read_file(proof_path)); + auto json = proof_to_json(proof); + + if (output_path == "-") { + writeStringToStdout(json); + vinfo("proof as fields written to stdout"); + } else { + write_file(output_path, { json.begin(), json.end() }); + vinfo("proof as fields written to: ", output_path); + } +} + +/** + * @brief Converts a verification key from a byte array into a list of field elements + * + * Why is this needed? + * This follows the same rationale as `proofAsFields`. 
+ * + * Communication: + * - stdout: The verification key as a list of field elements is written to stdout as a string + * - Filesystem: The verification key as a list of field elements is written to the path specified by outputPath + * + * @param vk_path Path to the file containing the serialized verification key + * @param output_path Path to write the verification key to + */ +template void vk_as_fields_honk(const std::string& vk_path, const std::string& output_path) +{ + using VerificationKey = Flavor::VerificationKey; + + auto verification_key = std::make_shared(from_buffer(read_file(vk_path))); + std::vector data = verification_key->to_field_elements(); + + auto json = vk_to_json(data); + if (output_path == "-") { + writeStringToStdout(json); + vinfo("vk as fields written to stdout"); + } else { + write_file(output_path, { json.begin(), json.end() }); + vinfo("vk as fields written to: ", output_path); + } +} + /** * @brief Creates a proof for an ACIR circuit, outputs the proof and verification key in binary and 'field' format * @@ -831,6 +887,15 @@ int main(int argc, char* argv[]) } else if (command == "write_vk_goblin_ultra_honk") { std::string output_path = get_option(args, "-o", "./target/vk"); write_vk_honk(bytecode_path, output_path); + } else if (command == "proof_as_fields_honk") { + std::string output_path = get_option(args, "-o", proof_path + "_fields.json"); + proof_as_fields_honk(proof_path, output_path); + } else if (command == "vk_as_fields_ultra_honk") { + std::string output_path = get_option(args, "-o", vk_path + "_fields.json"); + vk_as_fields_honk(vk_path, output_path); + } else if (command == "vk_as_fields_goblin_ultra_honk") { + std::string output_path = get_option(args, "-o", vk_path + "_fields.json"); + vk_as_fields_honk(vk_path, output_path); } else { std::cerr << "Unknown command: " << command << "\n"; return 1; diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.hpp index 4811d32407a..53c52675952 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.hpp @@ -41,7 +41,7 @@ template class ZeroMorphProver_ { // TODO(#742): Set this N_max to be the number of G1 elements in the mocked zeromorph SRS once it's in place. // (Then, eventually, set it based on the real SRS). For now we set it to be large but more or less arbitrary. 
- static const size_t N_max = 1 << 22; + static const size_t N_max = 1 << 23; public: /** diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/c_bind.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/c_bind.cpp index 8e1a602a576..defc038a4d8 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/c_bind.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/c_bind.cpp @@ -230,4 +230,19 @@ WASM_EXPORT void acir_write_vk_ultra_honk(uint8_t const* acir_vec, uint8_t** out ProverInstance prover_inst(builder); VerificationKey vk(prover_inst.proving_key); *out = to_heap_buffer(to_buffer(vk)); +} + +WASM_EXPORT void acir_proof_as_fields_ultra_honk(uint8_t const* proof_buf, fr::vec_out_buf out) +{ + auto proof = from_buffer>(from_buffer>(proof_buf)); + *out = to_heap_buffer(proof); +} + +WASM_EXPORT void acir_vk_as_fields_ultra_honk(uint8_t const* vk_buf, fr::vec_out_buf out_vkey) +{ + using VerificationKey = UltraFlavor::VerificationKey; + + auto verification_key = std::make_shared(from_buffer(vk_buf)); + std::vector vkey_as_fields = verification_key->to_field_elements(); + *out_vkey = to_heap_buffer(vkey_as_fields); } \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/ecc/curves/grumpkin/c_bind.cpp b/barretenberg/cpp/src/barretenberg/ecc/curves/grumpkin/c_bind.cpp index c7fac7ef7c6..78bb4a04fc5 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/curves/grumpkin/c_bind.cpp +++ b/barretenberg/cpp/src/barretenberg/ecc/curves/grumpkin/c_bind.cpp @@ -15,6 +15,17 @@ WASM_EXPORT void ecc_grumpkin__mul(uint8_t const* point_buf, uint8_t const* scal write(result, r); } +// Silencing warnings about reserved identifiers. Fixing would break downstream code that calls our WASM API. +// NOLINTBEGIN(cert-dcl37-c, cert-dcl51-cpp, bugprone-reserved-identifier) +WASM_EXPORT void ecc_grumpkin__add(uint8_t const* point_a_buf, uint8_t const* point_b_buf, uint8_t* result) +{ + using serialize::write; + auto point_a = from_buffer(point_a_buf); + auto point_b = from_buffer(point_b_buf); + grumpkin::g1::affine_element r = point_a + point_b; + write(result, r); +} + // multiplies a vector of points by a single scalar. 
Returns a vector of points (this is NOT a multi-exponentiation) WASM_EXPORT void ecc_grumpkin__batch_mul(uint8_t const* point_buf, uint8_t const* scalar_buf, diff --git a/barretenberg/cpp/src/barretenberg/ecc/fields/field_conversion.hpp b/barretenberg/cpp/src/barretenberg/ecc/fields/field_conversion.hpp index 3e6cd34f41e..e19c535da31 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/fields/field_conversion.hpp +++ b/barretenberg/cpp/src/barretenberg/ecc/fields/field_conversion.hpp @@ -18,7 +18,7 @@ namespace bb::field_conversion { */ template constexpr size_t calc_num_bn254_frs() { - if constexpr (IsAnyOf) { + if constexpr (IsAnyOf) { return 1; } else if constexpr (IsAnyOf) { return T::Params::NUM_BN254_SCALARS; @@ -48,7 +48,7 @@ template T convert_from_bn254_frs(std::span fr_vec) if constexpr (IsAnyOf) { ASSERT(fr_vec.size() == 1); return bool(fr_vec[0]); - } else if constexpr (IsAnyOf) { + } else if constexpr (IsAnyOf) { ASSERT(fr_vec.size() == 1); return static_cast(fr_vec[0]); } else if constexpr (IsAnyOf) { @@ -89,7 +89,7 @@ std::vector convert_grumpkin_fr_to_bn254_frs(const grumpkin::fr& val); */ template std::vector convert_to_bn254_frs(const T& val) { - if constexpr (IsAnyOf) { + if constexpr (IsAnyOf) { std::vector fr_vec{ val }; return fr_vec; } else if constexpr (IsAnyOf) { diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/goblin_ultra_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/goblin_ultra_flavor.hpp index 7fcfa4d483e..a417fbe4a97 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/goblin_ultra_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/goblin_ultra_flavor.hpp @@ -575,6 +575,31 @@ class GoblinUltraFlavor { lagrange_last, lagrange_ecc_op, databus_id); + + /** + * @brief Serialize verification key to field elements + * + * @return std::vector + */ + std::vector to_field_elements() + { + std::vector elements; + std::vector circuit_size_elements = bb::field_conversion::convert_to_bn254_frs(this->circuit_size); + elements.insert(elements.end(), circuit_size_elements.begin(), circuit_size_elements.end()); + // do the same for the rest of the fields + std::vector num_public_inputs_elements = + bb::field_conversion::convert_to_bn254_frs(this->num_public_inputs); + elements.insert(elements.end(), num_public_inputs_elements.begin(), num_public_inputs_elements.end()); + std::vector pub_inputs_offset_elements = + bb::field_conversion::convert_to_bn254_frs(this->pub_inputs_offset); + elements.insert(elements.end(), pub_inputs_offset_elements.begin(), pub_inputs_offset_elements.end()); + + for (Commitment& comm : this->get_all()) { + std::vector comm_elements = bb::field_conversion::convert_to_bn254_frs(comm); + elements.insert(elements.end(), comm_elements.begin(), comm_elements.end()); + } + return elements; + } }; /** * @brief A container for storing the partially evaluated multivariates produced by sumcheck. 
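For context, the `to_field_elements` serialization above is the native source of what the new `vk_as_fields_ultra_honk` command and `acir_vk_as_fields_ultra_honk` WASM export elsewhere in this diff expose. A minimal TypeScript sketch of consuming the binding from bb.js follows; the `@aztec/bb.js` import path and the `Barretenberg.new` signature are assumptions based on the bb.js package, not part of this diff:

```typescript
import { readFileSync } from 'fs';
// Assumed entry point; `RawBuffer` and `acirVkAsFieldsUltraHonk` are the
// bindings used/added by this diff's bb.js changes.
import { Barretenberg, RawBuffer } from '@aztec/bb.js';

// Convert a serialized UltraHonk verification key into its field-element form,
// mirroring `bb vk_as_fields_ultra_honk`: circuit size, public-input count and
// offset first, then every commitment as BN254 field elements.
async function vkToFields(vkPath: string): Promise<string[]> {
  const api = await Barretenberg.new(1); // single-threaded; signature assumed
  try {
    const fields = await api.acirVkAsFieldsUltraHonk(new RawBuffer(readFileSync(vkPath)));
    return fields.map(f => f.toString());
  } finally {
    await api.destroy();
  }
}
```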
diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/goblin_ultra_recursive_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/goblin_ultra_recursive_flavor.hpp index 44f04655429..393014d1f48 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/goblin_ultra_recursive_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/goblin_ultra_recursive_flavor.hpp @@ -102,6 +102,7 @@ template class GoblinUltraRecursiveFlavor_ { public: VerificationKey(const size_t circuit_size, const size_t num_public_inputs) { + // TODO(https://github.com/AztecProtocol/barretenberg/issues/983): Think about if these should be witnesses this->circuit_size = circuit_size; this->log_circuit_size = numeric::get_msb(circuit_size); this->num_public_inputs = num_public_inputs; @@ -151,6 +152,40 @@ template class GoblinUltraRecursiveFlavor_ { this->lagrange_ecc_op = Commitment::from_witness(builder, native_key->lagrange_ecc_op); this->databus_id = Commitment::from_witness(builder, native_key->databus_id); }; + + /** + * @brief Deserialize a verification key from a vector of field elements + * + * @param builder + * @param elements + */ + VerificationKey(CircuitBuilder& builder, std::span elements) + { + // deserialize circuit size + size_t num_frs_read = 0; + size_t num_frs_FF = bb::stdlib::field_conversion::calc_num_bn254_frs(); + size_t num_frs_Comm = bb::stdlib::field_conversion::calc_num_bn254_frs(); + + this->circuit_size = uint64_t(stdlib::field_conversion::convert_from_bn254_frs( + builder, elements.subspan(num_frs_read, num_frs_read + num_frs_FF)) + .get_value()); + num_frs_read += num_frs_FF; + this->num_public_inputs = uint64_t(stdlib::field_conversion::convert_from_bn254_frs( + builder, elements.subspan(num_frs_read, num_frs_read + num_frs_FF)) + .get_value()); + num_frs_read += num_frs_FF; + + this->pub_inputs_offset = uint64_t(stdlib::field_conversion::convert_from_bn254_frs( + builder, elements.subspan(num_frs_read, num_frs_read + num_frs_FF)) + .get_value()); + num_frs_read += num_frs_FF; + + for (Commitment& comm : this->get_all()) { + comm = bb::stdlib::field_conversion::convert_from_bn254_frs( + builder, elements.subspan(num_frs_read, num_frs_read + num_frs_Comm)); + num_frs_read += num_frs_Comm; + } + } }; /** diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp index fe5dca553f4..5422535e0d2 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp @@ -546,6 +546,31 @@ class UltraFlavor { table_4, lagrange_first, lagrange_last); + + /** + * @brief Serialize verification key to field elements + * + * @return std::vector + */ + std::vector to_field_elements() + { + std::vector elements; + std::vector circuit_size_elements = bb::field_conversion::convert_to_bn254_frs(this->circuit_size); + elements.insert(elements.end(), circuit_size_elements.begin(), circuit_size_elements.end()); + // do the same for the rest of the fields + std::vector num_public_inputs_elements = + bb::field_conversion::convert_to_bn254_frs(this->num_public_inputs); + elements.insert(elements.end(), num_public_inputs_elements.begin(), num_public_inputs_elements.end()); + std::vector pub_inputs_offset_elements = + bb::field_conversion::convert_to_bn254_frs(this->pub_inputs_offset); + elements.insert(elements.end(), 
pub_inputs_offset_elements.begin(), pub_inputs_offset_elements.end()); + + for (Commitment& comm : this->get_all()) { + std::vector comm_elements = bb::field_conversion::convert_to_bn254_frs(comm); + elements.insert(elements.end(), comm_elements.begin(), comm_elements.end()); + } + return elements; + } }; /** diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_recursive_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_recursive_flavor.hpp index bf220475341..e72937089d6 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_recursive_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_recursive_flavor.hpp @@ -274,6 +274,7 @@ template class UltraRecursiveFlavor_ { public: VerificationKey(const size_t circuit_size, const size_t num_public_inputs) { + // TODO(https://github.com/AztecProtocol/barretenberg/issues/983): Think about if these should be witnesses this->circuit_size = circuit_size; this->log_circuit_size = numeric::get_msb(circuit_size); this->num_public_inputs = num_public_inputs; @@ -317,6 +318,40 @@ template class UltraRecursiveFlavor_ { this->lagrange_first = Commitment::from_witness(builder, native_key->lagrange_first); this->lagrange_last = Commitment::from_witness(builder, native_key->lagrange_last); }; + + /** + * @brief Deserialize a verification key from a vector of field elements + * + * @param builder + * @param elements + */ + VerificationKey(CircuitBuilder& builder, std::span elements) + { + // deserialize circuit size + size_t num_frs_read = 0; + size_t num_frs_FF = bb::stdlib::field_conversion::calc_num_bn254_frs(); + size_t num_frs_Comm = bb::stdlib::field_conversion::calc_num_bn254_frs(); + + this->circuit_size = uint64_t(stdlib::field_conversion::convert_from_bn254_frs( + builder, elements.subspan(num_frs_read, num_frs_read + num_frs_FF)) + .get_value()); + num_frs_read += num_frs_FF; + this->num_public_inputs = uint64_t(stdlib::field_conversion::convert_from_bn254_frs( + builder, elements.subspan(num_frs_read, num_frs_read + num_frs_FF)) + .get_value()); + num_frs_read += num_frs_FF; + + this->pub_inputs_offset = uint64_t(stdlib::field_conversion::convert_from_bn254_frs( + builder, elements.subspan(num_frs_read, num_frs_read + num_frs_FF)) + .get_value()); + num_frs_read += num_frs_FF; + + for (Commitment& comm : this->get_all()) { + comm = bb::stdlib::field_conversion::convert_from_bn254_frs( + builder, elements.subspan(num_frs_read, num_frs_read + num_frs_Comm)); + num_frs_read += num_frs_Comm; + } + } }; /** diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/aztec_constants.hpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/aztec_constants.hpp index 62246d48614..badf02f827f 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/aztec_constants.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/aztec_constants.hpp @@ -11,9 +11,10 @@ const size_t MAX_NEW_L2_TO_L1_MSGS_PER_CALL = 2; const size_t MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL = 16; const size_t MAX_PUBLIC_DATA_READS_PER_CALL = 16; const size_t MAX_NOTE_HASH_READ_REQUESTS_PER_CALL = 32; -const size_t MAX_NULLIFIER_READ_REQUESTS_PER_CALL = 2; -const size_t MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL = 2; -const size_t MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL = 1; +const size_t MAX_NULLIFIER_READ_REQUESTS_PER_CALL = 32; +const size_t MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL = 32; +const size_t MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL = 16; +const 
size_t MAX_NOTE_ENCRYPTED_LOGS_PER_CALL = 16; const size_t MAX_ENCRYPTED_LOGS_PER_CALL = 4; const size_t MAX_UNENCRYPTED_LOGS_PER_CALL = 4; const size_t MAX_NEW_NOTE_HASHES_PER_TX = 64; @@ -24,9 +25,10 @@ const size_t MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX = 32; const size_t MAX_PUBLIC_DATA_READS_PER_TX = 32; const size_t MAX_NEW_L2_TO_L1_MSGS_PER_TX = 2; const size_t MAX_NOTE_HASH_READ_REQUESTS_PER_TX = 128; -const size_t MAX_NULLIFIER_READ_REQUESTS_PER_TX = 8; -const size_t MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX = 8; -const size_t MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX = 4; +const size_t MAX_NULLIFIER_READ_REQUESTS_PER_TX = 128; +const size_t MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX = 128; +const size_t MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX = 64; +const size_t MAX_NOTE_ENCRYPTED_LOGS_PER_TX = 64; const size_t MAX_ENCRYPTED_LOGS_PER_TX = 8; const size_t MAX_UNENCRYPTED_LOGS_PER_TX = 8; const size_t NUM_ENCRYPTED_LOGS_HASHES_PER_TX = 1; @@ -61,8 +63,7 @@ const size_t ARGS_HASH_CHUNK_COUNT = 64; const size_t MAX_ARGS_LENGTH = ARGS_HASH_CHUNK_COUNT * ARGS_HASH_CHUNK_LENGTH; const size_t INITIAL_L2_BLOCK_NUM = 1; const size_t BLOB_SIZE_IN_BYTES = 31 * 4096; -const size_t NESTED_CALL_L2_GAS_BUFFER = 20000; -const size_t MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS = 32000; +const size_t MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS = 20000; const size_t MAX_PACKED_BYTECODE_SIZE_PER_PRIVATE_FUNCTION_IN_FIELDS = 3000; const size_t MAX_PACKED_BYTECODE_SIZE_PER_UNCONSTRAINED_FUNCTION_IN_FIELDS = 3000; const size_t REGISTERER_PRIVATE_FUNCTION_BROADCASTED_ADDITIONAL_FIELDS = 19; @@ -94,6 +95,8 @@ const size_t NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = 3; const size_t SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH + 1; const size_t PARTIAL_STATE_REFERENCE_LENGTH = 6; const size_t READ_REQUEST_LENGTH = 2; +const size_t LOG_HASH_LENGTH = 3; +const size_t NOTE_LOG_HASH_LENGTH = 4; const size_t NOTE_HASH_LENGTH = 2; const size_t SCOPED_NOTE_HASH_LENGTH = NOTE_HASH_LENGTH + 2; const size_t NULLIFIER_LENGTH = 3; @@ -112,17 +115,17 @@ const size_t PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH = (NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_CALL) + (NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_CALL) + MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL + MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL + 1 + (L2_TO_L1_MESSAGE_LENGTH * MAX_NEW_L2_TO_L1_MSGS_PER_CALL) + 2 + - (SIDE_EFFECT_LENGTH * MAX_ENCRYPTED_LOGS_PER_CALL) + (SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL) + 2 + - HEADER_LENGTH + TX_CONTEXT_LENGTH; + (NOTE_LOG_HASH_LENGTH * MAX_NOTE_ENCRYPTED_LOGS_PER_CALL) + (LOG_HASH_LENGTH * MAX_ENCRYPTED_LOGS_PER_CALL) + + (LOG_HASH_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL) + 2 + HEADER_LENGTH + TX_CONTEXT_LENGTH; const size_t PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH = CALL_CONTEXT_LENGTH + 2 + (READ_REQUEST_LENGTH * MAX_NULLIFIER_READ_REQUESTS_PER_CALL) + (READ_REQUEST_LENGTH * MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL) + (CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH * MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL) + (CONTRACT_STORAGE_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_CALL) + MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL + (NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_CALL) + (NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_CALL) + - (L2_TO_L1_MESSAGE_LENGTH * MAX_NEW_L2_TO_L1_MSGS_PER_CALL) + 2 + - (SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL) + 1 + HEADER_LENGTH + GLOBAL_VARIABLES_LENGTH + - AZTEC_ADDRESS_LENGTH + /* revert_code */ 1 + 2 * GAS_LENGTH + /* transaction_fee */ 1; + 
(L2_TO_L1_MESSAGE_LENGTH * MAX_NEW_L2_TO_L1_MSGS_PER_CALL) + 2 + (LOG_HASH_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL) + + 1 + HEADER_LENGTH + GLOBAL_VARIABLES_LENGTH + AZTEC_ADDRESS_LENGTH + /* revert_code */ 1 + 2 * GAS_LENGTH + + /* transaction_fee */ 1; const size_t PRIVATE_CALL_STACK_ITEM_LENGTH = AZTEC_ADDRESS_LENGTH + FUNCTION_DATA_LENGTH + PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH; const size_t PUBLIC_CONTEXT_INPUTS_LENGTH = @@ -137,24 +140,24 @@ const size_t VALIDATION_REQUESTS_LENGTH = (PUBLIC_DATA_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_TX); const size_t PUBLIC_DATA_UPDATE_REQUEST_LENGTH = 2; const size_t COMBINED_ACCUMULATED_DATA_LENGTH = - MAX_NEW_NOTE_HASHES_PER_TX + MAX_NEW_NULLIFIERS_PER_TX + MAX_NEW_L2_TO_L1_MSGS_PER_TX + 4 + + MAX_NEW_NOTE_HASHES_PER_TX + MAX_NEW_NULLIFIERS_PER_TX + MAX_NEW_L2_TO_L1_MSGS_PER_TX + 5 + (MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * PUBLIC_DATA_UPDATE_REQUEST_LENGTH) + GAS_LENGTH; const size_t COMBINED_CONSTANT_DATA_LENGTH = HEADER_LENGTH + TX_CONTEXT_LENGTH + GLOBAL_VARIABLES_LENGTH; const size_t CALLER_CONTEXT_LENGTH = 2 * AZTEC_ADDRESS_LENGTH; const size_t CALL_REQUEST_LENGTH = 1 + AZTEC_ADDRESS_LENGTH + CALLER_CONTEXT_LENGTH + 2; const size_t PRIVATE_ACCUMULATED_DATA_LENGTH = (SCOPED_NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_TX) + (SCOPED_NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_TX) + - (MAX_NEW_L2_TO_L1_MSGS_PER_TX * SCOPED_L2_TO_L1_MESSAGE_LENGTH) + (SIDE_EFFECT_LENGTH * MAX_ENCRYPTED_LOGS_PER_TX) + - (SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_TX) + 2 + - (CALL_REQUEST_LENGTH * MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX) + + (MAX_NEW_L2_TO_L1_MSGS_PER_TX * SCOPED_L2_TO_L1_MESSAGE_LENGTH) + + (NOTE_LOG_HASH_LENGTH * MAX_NOTE_ENCRYPTED_LOGS_PER_TX) + (LOG_HASH_LENGTH * MAX_ENCRYPTED_LOGS_PER_TX) + + (LOG_HASH_LENGTH * MAX_UNENCRYPTED_LOGS_PER_TX) + 2 + (CALL_REQUEST_LENGTH * MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX) + (CALL_REQUEST_LENGTH * MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX); const size_t PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 1 + VALIDATION_REQUESTS_LENGTH + PRIVATE_ACCUMULATED_DATA_LENGTH + COMBINED_CONSTANT_DATA_LENGTH + CALL_REQUEST_LENGTH + AZTEC_ADDRESS_LENGTH; const size_t PUBLIC_ACCUMULATED_DATA_LENGTH = (MAX_NEW_NOTE_HASHES_PER_TX * NOTE_HASH_LENGTH) + (MAX_NEW_NULLIFIERS_PER_TX * NULLIFIER_LENGTH) + - (MAX_NEW_L2_TO_L1_MSGS_PER_TX * 1) + (MAX_ENCRYPTED_LOGS_PER_TX * SIDE_EFFECT_LENGTH) + - (MAX_UNENCRYPTED_LOGS_PER_TX * SIDE_EFFECT_LENGTH) + 2 + + (MAX_NEW_L2_TO_L1_MSGS_PER_TX * 1) + (NOTE_LOG_HASH_LENGTH * MAX_NOTE_ENCRYPTED_LOGS_PER_TX) + + (MAX_ENCRYPTED_LOGS_PER_TX * LOG_HASH_LENGTH) + (MAX_UNENCRYPTED_LOGS_PER_TX * LOG_HASH_LENGTH) + 2 + (MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * PUBLIC_DATA_UPDATE_REQUEST_LENGTH) + (MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX * CALL_REQUEST_LENGTH) + GAS_LENGTH; const size_t PUBLIC_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = diff --git a/barretenberg/ts/src/barretenberg_api/index.ts b/barretenberg/ts/src/barretenberg_api/index.ts index 44b10c57d8d..c7972d38869 100644 --- a/barretenberg/ts/src/barretenberg_api/index.ts +++ b/barretenberg/ts/src/barretenberg_api/index.ts @@ -603,6 +603,30 @@ export class BarretenbergApi { const out = result.map((r, i) => outTypes[i].fromBuffer(r)); return out[0]; } + + async acirProofAsFieldsUltraHonk(proofBuf: Uint8Array): Promise { + const inArgs = [proofBuf].map(serializeBufferable); + const outTypes: OutputType[] = [VectorDeserializer(Fr)]; + const result = await this.wasm.callWasmExport( + 'acir_proof_as_fields_ultra_honk', + inArgs, + outTypes.map(t => t.SIZE_IN_BYTES), + ); 
+ const out = result.map((r, i) => outTypes[i].fromBuffer(r)); + return out[0]; + } + + async acirVkAsFieldsUltraHonk(vkBuf: Uint8Array): Promise { + const inArgs = [vkBuf].map(serializeBufferable); + const outTypes: OutputType[] = [VectorDeserializer(Fr)]; + const result = await this.wasm.callWasmExport( + 'acir_vk_as_fields_ultra_honk', + inArgs, + outTypes.map(t => t.SIZE_IN_BYTES), + ); + const out = result.map((r, i) => outTypes[i].fromBuffer(r)); + return out as any; + } } export class BarretenbergApiSync { constructor(protected wasm: BarretenbergWasm) {} @@ -1183,4 +1207,28 @@ export class BarretenbergApiSync { const out = result.map((r, i) => outTypes[i].fromBuffer(r)); return out[0]; } + + acirProofAsFieldsUltraHonk(proofBuf: Uint8Array): Fr[] { + const inArgs = [proofBuf].map(serializeBufferable); + const outTypes: OutputType[] = [VectorDeserializer(Fr)]; + const result = this.wasm.callWasmExport( + 'acir_proof_as_fields_ultra_honk', + inArgs, + outTypes.map(t => t.SIZE_IN_BYTES), + ); + const out = result.map((r, i) => outTypes[i].fromBuffer(r)); + return out[0]; + } + + acirVkAsFieldsUltraHonk(vkBuf: Uint8Array): Fr[] { + const inArgs = [vkBuf].map(serializeBufferable); + const outTypes: OutputType[] = [VectorDeserializer(Fr)]; + const result = this.wasm.callWasmExport( + 'acir_vk_as_fields_ultra_honk', + inArgs, + outTypes.map(t => t.SIZE_IN_BYTES), + ); + const out = result.map((r, i) => outTypes[i].fromBuffer(r)); + return out as any; + } } diff --git a/barretenberg/ts/src/main.ts b/barretenberg/ts/src/main.ts index 2ac32e23544..5e864f7867d 100755 --- a/barretenberg/ts/src/main.ts +++ b/barretenberg/ts/src/main.ts @@ -416,6 +416,50 @@ export async function verifyUltraHonk(proofPath: string, vkPath: string) { await api.destroy(); } } + +export async function proofAsFieldsUltraHonk(proofPath: string, outputPath: string) { + const { api } = await initLite(); + try { + debug('outputting proof as vector of fields'); + const proofAsFields = await api.acirProofAsFieldsUltraHonk(readFileSync(proofPath)); + const jsonProofAsFields = JSON.stringify(proofAsFields.map(f => f.toString())); + + if (outputPath === '-') { + process.stdout.write(jsonProofAsFields); + debug(`proofAsFieldsUltraHonk written to stdout`); + } else { + writeFileSync(outputPath, jsonProofAsFields); + debug(`proofAsFieldsUltraHonk written to: ${outputPath}`); + } + + debug('done.'); + } finally { + await api.destroy(); + } +} + +export async function vkAsFieldsUltraHonk(vkPath: string, vkeyOutputPath: string) { + const { api } = await initLite(); + + try { + debug('serializing vk byte array into field elements'); + const vkAsFields = await api.acirVkAsFieldsUltraHonk(new RawBuffer(readFileSync(vkPath))); + const jsonVKAsFields = JSON.stringify(vkAsFields.map(f => f.toString())); + + if (vkeyOutputPath === '-') { + process.stdout.write(jsonVKAsFields); + debug(`vkAsFieldsUltraHonk written to stdout`); + } else { + writeFileSync(vkeyOutputPath, jsonVKAsFields); + debug(`vkAsFieldsUltraHonk written to: ${vkeyOutputPath}`); + } + + debug('done.'); + } finally { + await api.destroy(); + } +} + const program = new Command(); program.option('-v, --verbose', 'enable verbose logging', false); @@ -430,7 +474,7 @@ function handleGlobalOptions() { program .command('prove_and_verify') .description('Generate a proof and verify it. 
Process exits with success or failure code.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/acir.gz') + .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') .action(async ({ bytecodePath, witnessPath, crsPath }) => { handleGlobalOptions(); @@ -441,7 +485,7 @@ program program .command('prove_and_verify_ultra_honk') .description('Generate an UltraHonk proof and verify it. Process exits with success or failure code.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/acir.gz') + .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') .action(async ({ bytecodePath, witnessPath, crsPath }) => { handleGlobalOptions(); @@ -452,7 +496,7 @@ program program .command('prove_and_verify_goblin_ultra_honk') .description('Generate a GUH proof and verify it. Process exits with success or failure code.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/acir.gz') + .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') .action(async ({ bytecodePath, witnessPath, crsPath }) => { handleGlobalOptions(); @@ -463,7 +507,7 @@ program program .command('prove_and_verify_goblin') .description('Generate a Goblin proof and verify it. Process exits with success or failure code.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/acir.gz') + .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') .action(async ({ bytecodePath, witnessPath, crsPath }) => { handleGlobalOptions(); @@ -474,7 +518,7 @@ program program .command('prove') .description('Generate a proof and write it to a file.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/acir.gz') + .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') .option('-o, --output-path ', 'Specify the proof output path', './proofs/proof') .action(async ({ bytecodePath, witnessPath, outputPath, crsPath }) => { @@ -485,7 +529,7 @@ program program .command('gates') .description('Print gate count to standard output.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/acir.gz') + .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') .action(async ({ bytecodePath: bytecodePath }) => { handleGlobalOptions(); await gateCount(bytecodePath); @@ -505,7 +549,7 @@ program program .command('contract') .description('Output solidity verification key contract.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/acir.gz') + .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') .option('-o, --output-path ', 'Specify the path to write the contract', './target/contract.sol') .requiredOption('-k, --vk-path ', 'Path to a verification key. 
avoids recomputation.') .action(async ({ outputPath, vkPath }) => { @@ -516,8 +560,8 @@ program program .command('write_vk') .description('Output verification key.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/acir.gz') - .requiredOption('-o, --output-path ', 'Specify the path to write the key') + .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') + .option('-o, --output-path ', 'Specify the path to write the key') .action(async ({ bytecodePath, outputPath, crsPath }) => { handleGlobalOptions(); await writeVk(bytecodePath, crsPath, outputPath); @@ -526,7 +570,7 @@ program program .command('write_pk') .description('Output proving key.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/acir.gz') + .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') .requiredOption('-o, --output-path ', 'Specify the path to write the key') .action(async ({ bytecodePath, outputPath, crsPath }) => { handleGlobalOptions(); @@ -566,7 +610,7 @@ program program .command('prove_ultra_honk') .description('Generate a proof and write it to a file.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/acir.gz') + .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') .option('-o, --output-path ', 'Specify the proof output path', './proofs/proof') .action(async ({ bytecodePath, witnessPath, outputPath, crsPath }) => { @@ -577,7 +621,7 @@ program program .command('write_vk_ultra_honk') .description('Output verification key.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/acir.gz') + .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') .requiredOption('-o, --output-path ', 'Specify the path to write the key') .action(async ({ bytecodePath, outputPath, crsPath }) => { handleGlobalOptions(); @@ -595,4 +639,24 @@ program process.exit(result ? 0 : 1); }); +program + .command('proof_as_fields_honk') + .description('Return the proof as fields elements') + .requiredOption('-p, --proof-path ', 'Specify the proof path') + .requiredOption('-o, --output-path ', 'Specify the JSON path to write the proof fields') + .action(async ({ proofPath, outputPath }) => { + handleGlobalOptions(); + await proofAsFieldsUltraHonk(proofPath, outputPath); + }); + +program + .command('vk_as_fields_ultra_honk') + .description('Return the verification key represented as fields elements.') + .requiredOption('-k, --vk-path ', 'Path to verification key.') + .requiredOption('-o, --output-path ', 'Specify the JSON path to write the verification key fields.') + .action(async ({ vkPath, outputPath }) => { + handleGlobalOptions(); + await vkAsFieldsUltraHonk(vkPath, outputPath); + }); + program.name('bb.js').parse(process.argv); diff --git a/docs/Earthfile b/docs/Earthfile index b7a020426bd..9dcd7c74618 100644 --- a/docs/Earthfile +++ b/docs/Earthfile @@ -10,19 +10,18 @@ deps: RUN yarn install --frozen-lockfile build: + ARG ENV BUILD ../yarn-project/+build-dev BUILD ../+release-meta FROM +deps - COPY --dir ../yarn-project/+build-dev/usr/src /usr COPY ../+release-meta/usr/src/.release-please-manifest.json /usr/src - COPY . . 
-    RUN ./scripts/build.sh
     SAVE ARTIFACT build

 serve:
+    ARG ENV
     FROM +deps
     COPY +build/build build
     COPY ./static static
@@ -34,8 +33,10 @@ serve:
     SAVE ARTIFACT /usr/src/docs
     SAVE IMAGE aztecprotocol/docs-server

+
 deploy-preview:
     BUILD ../yarn-project/+scripts-prod
+    ARG ENV
     ARG NETLIFY_AUTH_TOKEN
     ARG NETLIFY_SITE_ID
     ARG AZTEC_BOT_COMMENTER_GITHUB_TOKEN
diff --git a/docs/README.md b/docs/README.md
index f1fae6d86ce..3d53d01e512 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -5,7 +5,13 @@ Documentation for the Aztec Network, built with docusaurus

 You can view the latest successful build here: https://docs.aztec.network

-## Files
+## Docusaurus
+
+This website is built using [Docusaurus 3](https://docusaurus.io/), a modern static website generator.
+
+### Files
+
+Here are the most relevant files you should be aware of:

 - `.gitignore` - This specifies which files Git should ignore when committing and pushing to remote repositories.
 - `docusaurus.config.js` - This is the configuration file for Docusaurus. You can manage the links in the header and footer, and site metadata here. A more in-depth introduction to this configuration file is available on the [Docusaurus website](https://docusaurus.io/docs/configuration) and full documentation for the API is [here](https://docusaurus.io/docs/docusaurus.config.js).
@@ -15,52 +21,61 @@
 The .md files in the `docs/` directory are the docs. See the [Docusaurus website](https://docusaurus.io/docs/docs-introduction) for the full documentation on how to create docs and to manage the metadata.

-## Contributing
-
-We welcome contributions from the community. Please review our [contribution guidelines](CONTRIBUTING.md) for more information.
-
-## Docusaurus
-
-This website is built using [Docusaurus 2](https://docusaurus.io/), a modern static website generator.
-
 ### Installation

+To install the dependencies and dev dependencies, run:
+
 ```
 $ yarn
 ```

 ### Development

-#### Locally
+Aztec docs pull some code from the rest of the repository. This allows for great flexibility and maintainability. Some documentation is also autogenerated.
+
+For that reason, there's a preprocessing step. You can run that step ad hoc with `yarn preprocess`, or with `yarn preprocess:dev` if you want it to stay running and watching for changes.

-To build; serve to `localhost:3000`; and watch for changes:
+This step does the following:
+- Pulls the code from the source files using the `#include_code` macros explained below.
+- Autogenerates documentation using the scripts in the `src` folder.
+- Puts the final documentation in a `processed-docs` folder.
+
+> [!NOTE]
+> You likely want to benefit from webpack's hot reload, which allows you to immediately see your changes when you develop on the docs. For this reason, the `yarn dev` commands will add the `ENV=dev` environment variable, which makes Docusaurus serve the `docs` folder instead of the `processed-docs` folder.
+> If you're making changes to included code or the aztec.nr reference, you can run `yarn docs` instead.
+
+#### Run locally
+
+To run the Docusaurus development server with hot reload (watching for changes), run:

 ```
-$ yarn start:dev:local
+$ yarn dev:local
 ```

-#### Remotely (on mainframe)
+#### Run remotely (on mainframe)

-To build; serve to `localhost:3000`; and watch for changes:
+It's common for developers to work on codespaces or other remote targets. For this, you need to expose your development server. This is common enough to be the default development command:
+
 ```
-$ yarn start:dev
+$ yarn dev
 ```

-This command preprocesses `#include_code` macros, then builds the html, then starts a local development server and opens up a browser window (at `localhost:3000`, by default).
-Most changes are reflected live without having to restart the server.
-
 ### Build

+To build the final version of the docs (which includes processes not present in dev, like broken-link checking and minification), you can run:
+
 ```
 $ yarn build
 ```

-This command generates static content into the `build` directory and can be served using any static contents hosting service. When run on Netlify, it will also build the typescript projects needed for extracting type information via typedoc.
-
+This command runs the preprocessing step, generates static content into the `build` directory, and the output can be served by any static content hosting service.

 ## Macros

+As mentioned above, Aztec docs pull code from the source files. This makes it easy to include sections of the source code in tutorials and other examples.
+
+This is done via macros which are processed in the preprocessing step described above.
+
 ### `#include_code`

 You can embed code snippets into a `.md`/`.mdx` file from code which lives elsewhere in the repo.
@@ -133,3 +148,7 @@ Alternatively, you can also use the `AztecPackagesVersion()` js function, which
 import { AztecPackagesVersion } from "@site/src/components/Version";
 <>{AztecPackagesVersion()}
 ```
+
+## Contributing
+
+We welcome contributions from the community. Please review our [contribution guidelines](CONTRIBUTING.md) for more information.
diff --git a/docs/docs/aztec/aztec/concepts/_category_.json b/docs/docs/aztec/aztec/concepts/_category_.json
index 5210ef5623a..717290c3db1 100644
--- a/docs/docs/aztec/aztec/concepts/_category_.json
+++ b/docs/docs/aztec/aztec/concepts/_category_.json
@@ -1,6 +1,6 @@
 {
-  "position": 1,
-  "collapsible": true,
-  "collapsed": true,
-  "label": "Concepts"
+  "position": 2,
+  "collapsible": true,
+  "collapsed": true,
+  "label": "Concepts"
 }
diff --git a/docs/docs/aztec/aztec/concepts/accounts/keys.md b/docs/docs/aztec/aztec/concepts/accounts/keys.md
index fa41182032b..a0cc247d1b4 100644
--- a/docs/docs/aztec/aztec/concepts/accounts/keys.md
+++ b/docs/docs/aztec/aztec/concepts/accounts/keys.md
@@ -1,110 +1,179 @@
 # Keys
+The goal of this section is to give app developers a good idea of what keys are used in the system.
+For a detailed description, head over to the [protocol specification](/protocol-specs/addresses-and-keys/keys#cheat-sheet).

-Typically, each account in Aztec is backed by two separate keys:
+Each account in Aztec is backed by 4 key pairs:

-- A **signing key** used for authenticating the owner of the account.
-- A **privacy master key** used for deriving encryption and nullifying keys for managing private state.
+- A **nullifier key pair** used for note nullifier computation, comprising the master nullifier secret key (`nsk_m`) and master nullifier public key (`Npk_m`).
+- An **incoming viewing key pair** used to encrypt a note for the recipient, consisting of the master incoming viewing secret key (`ivsk_m`) and master incoming viewing public key (`Ivpk_m`).
+- An **outgoing viewing key pair** used to encrypt a note for the sender, comprising the master outgoing viewing secret key (`ovsk_m`) and master outgoing viewing public key (`Ovpk_m`).
+- A **tagging key pair** used to compute tags in a [tagging note discovery scheme](/protocol-specs/private-message-delivery/private-msg-delivery#note-tagging), comprising the master tagging secret key (`tsk_m`) and master tagging public key (`Tpk_m`).

-## Signing keys
+:::info
+All key pairs above are derived from a secret using a ZCash-inspired scheme defined in the [protocol specification](/protocol-specs/addresses-and-keys/keys#cheat-sheet).
+:::

-Signing keys allow their holder to act as their corresponding account in Aztec, similarly to the keys used for an Ethereum account. If a signing key is leaked, the user can potentially lose all their funds.
+:::note
+Additionally, there is typically a signing key pair which is used for authenticating the owner of the account.
+However, since Aztec supports native [account abstraction](../accounts/index.md#what-is-account-abstraction), this is not defined by the protocol.
+Instead, it's up to the account contract developer to implement it.
+:::

-Since Aztec implements full [signature abstraction](./index.md), signing keys depend on the account contract implementation for each user. Usually, an account contract will validate a signature of the incoming payload against a known public key.
+## Public key retrieval
+The keys can either be retrieved from a key registry contract or from the [Private eXecution Environment (PXE)](/aztec/aztec/concepts/pxe/index.md).

-This is a snippet of our Schnorr Account contract implementation, which uses Schnorr signatures for authentication:
+:::note
+The key registry is a canonical contract used to store user public keys.
+Canonical in this context means that it is a contract whose functionality is essential for the protocol.
+There is one key registry, and its address is hardcoded in the protocol code.
+:::

-#include_code entrypoint /noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr rust
+To retrieve them, a developer can use one of the getters in Aztec.nr:

-Still, different accounts may use different signing schemes, may require multi-factor authentication, or _may not even use signing keys_ and instead rely on other authentication mechanisms. Read [how to write an account contract](/tutorials/tutorials/write_accounts_contract.md) for a full example of how to manage authentication.
+#include_code key-getters /noir-projects/aztec-nr/aztec/src/keys/getters.nr rust

-Furthermore, and since signatures are fully abstracted, how the key is stored in the contract is abstracted as well and left to the developer of the account contract. Here are a few ideas on how to store them, each with their pros and cons.
+If the keys are registered in the key registry, these methods can be called without any setup.
+If they are not, it is necessary to first register the user as a recipient in our PXE.

-### Using a private note
+First, we need to get hold of the recipient's [complete address](#complete-address).
+Below are some ways we could instantiate it after receiving the information in string form from a recipient:

-Storing the signing public key in a private note makes it accessible from the entrypoint function, which is required to be a private function, and allows for rotating the key when needed. However, keep in mind that reading a private note requires nullifying it to ensure it is up to date, so each transaction you send will destroy and recreate the public key.
This has the side effect of enforcing a strict ordering across all transactions, since each transaction will refer the instantiation of the private note from the previous one. +#include_code instantiate-complete-address /yarn-project/circuits.js/src/structs/complete_address.test.ts rust -### Using an immutable private note +Then, to register the recipient's [complete address](#complete-address) in the PXE, we would call the `registerRecipient` PXE endpoint using [Aztec.js](/aztec/aztec/core_components.md#aztecjs): -Similar to using a private note, but using an immutable private note removes the need to nullify the note on every read. This generates less nullifiers and commitments per transaction, and does not enforce an order across transactions. However, it does not allow the user to rotate their key should they lose it. +#include_code register-recipient /yarn-project/aztec.js/src/wallet/create_recipient.ts rust -### Using shared state +During private function execution, these keys are obtained from the PXE via an oracle call. -A compromise between the two solutions above is to use [shared state](/reference/reference/smart_contract_reference/storage/shared_state.md). This would not generate additional nullifiers and commitments for each transaction while allowing the user to rotate their key. However, this causes every transaction to now have a time-to-live determined by the frequency of the mutable shared state, as well as imposing restrictions on how fast keys can be rotated due to minimum delays. +## Key rotation +To prevent users from needing to migrate all their positions if some of their keys are leaked, we allow for key rotation. +Key rotation can be performed by calling the corresponding function on the key registry. +E.g. for the nullifier key: -### Reusing the privacy master key +#include_code key-rotation /yarn-project/end-to-end/src/e2e_key_registry.test.ts rust -It is possible to use the privacy master key as the signing key also. Since this key is part of the address preimage (more on this on the privacy master key section), you can validate it against the account contract address rather than having to store it. However, this approach is not recommended since it reduces the security of the user's account. +Note that the notes directly contain `Npk_m`. +This means that it will be possible to nullify the notes with the same old key after the key rotation, and an attacker could still potentially steal them if there are no other guardrails in place (like, for example, an account contract auth check). +These guardrails are typically in place, so a user should not lose their notes even if this unfortunate accident happens. -### Using a separate keystore +## Scoped keys +To minimize the damage of potential key leaks, the keys are scoped (also called app-siloed) to the contract that requests them. +This means that the keys used for the same user in two different application contracts will be different, and a potential leak of the scoped keys would only affect one application. -Since there are no restrictions on the actions that an account contract may execute for authenticating a transaction (as long as these are all private function executions), the signing public keys can be stored in a [separate keystore contract](https://vitalik.ca/general/2023/06/09/three_transitions.html) that is checked on every call. This will incur in a higher proving time for each transaction, but has no additional cost in terms of fees, and allows for easier key management in a centralized contract. +This also allows per-application auditability.
+A user may choose to disclose their incoming and outgoing viewing keys for a given application to an auditor or regulator (or to third-party interfaces, e.g. giving a block explorer access to display their activity), as a means to reveal all their activity within that context, while retaining privacy across all other applications in the network. -## Privacy keys +In the case of nullifier keys, there is also a security reason involved. +Since the nullifier secret is exposed to the application contract to be used in the nullifier computation, the contract may accidentally or maliciously leak it. +If that happens, only the nullifier secret for that application is compromised (`nsk_app` and not `nsk_m`). -Each account is tied to a **privacy master key**. Unlike signing keys, privacy keys are enshrined at the protocol layer, are required to be Grumpkin keys, and are tied to their account address. These keys are used for deriving encryption and nullifying keys, scoped to each application, in a manner similar to BIP32. +Above we mentioned that the notes typically contain `Npk_m`. +It might seem like a mistake given that the notes are nullified with `nsk_app`. +This is intentional: instead of directly trying to derive `Npk_m` from `nsk_app`, we verify that both of the keys were derived from the same `nsk_m` in our protocol circuits. +If you are curious how the derivation scheme works, head over to the [protocol specification](/protocol-specs/addresses-and-keys/example-usage/nullifier#diagram). -:::warning -At the time of this writing, privacy master keys are used by applications without any derivation whatsoever. This means that the private key is used directly as a nullifier secret for all applications, and the public key is used as an encryption key for all purposes. This is highly insecure, and will change to match the specification below in an upcoming release. -::: +## Protocol key types +All the keys below are Grumpkin keys (public keys derived on the Grumpkin curve). -### Addresses, partial addresses, and public keys +## Nullifier keys +Whenever a note is consumed, a nullifier deterministically derived from it is emitted. +This mechanism prevents double-spends, since nullifiers are checked by the protocol to be unique. +Now, in order to preserve privacy, a third party should not be able to link a note hash to its nullifier - this link is enforced by the note implementation. +Therefore, calculating the nullifier for a note requires a secret from its owner. -When deploying a contract, the address is deterministically derived from the contract code, the constructor arguments, a salt, and a public key: +An application in Aztec.nr can request a secret from the current user for computing the nullifier of a note via the `request_nullifier_secret_key` API: -``` -partial_address := hash(salt, contract_code, constructor_hash) -address := hash(public_key, partial_address) -``` +#include_code nullifier /noir-projects/aztec-nr/value-note/src/value_note.nr rust -This public key corresponds to the privacy master key of the account. In order to manage private state, such as receiving an encrypted note, an account needs to share its partial address and public key, along with its address. This allows anyone to verify that the public key corresponds to the intended address. We call the address, partial address, and public key of a user their **complete address**. +Typically, `Npk_m` is stored in a note and later on, the note is nullified using the secret app-siloed version (denoted `nsk_app`). +`nsk_app` is derived by hashing `nsk_m` with the app contract address, and it is necessary to present it to compute the nullifier. +The validity of `nsk_app` is verified by our [protocol kernel circuits](/protocol-specs/circuits/private-kernel-tail#verifying-and-splitting-ordered-data).
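To make the siloing concrete, here is a minimal sketch of the shape of this derivation. The `poseidon2` helper, the input ordering, and the domain separation here are illustrative assumptions only; the actual derivation is defined and enforced by the protocol circuits:

```typescript
// Illustrative sketch only -- the real derivation is enforced by the protocol circuits.
type Field = bigint;

// Assumed hash helper; not an actual aztec.js export.
declare function poseidon2(inputs: Field[]): Field;

// App-siloed nullifier secret: the master secret hashed with the app contract address.
function deriveNskApp(nskM: Field, appContractAddress: Field): Field {
  return poseidon2([nskM, appContractAddress]);
}

// A note's nullifier is then derived from the note hash and the siloed secret,
// so leaking nsk_app compromises only that one application.
function computeNullifier(noteHash: Field, nskApp: Field): Field {
  return poseidon2([noteHash, nskApp]);
}
```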
+ -An application in Aztec.nr can access the encryption public key for a given address using the oracle call `get_public_key`, which you can then use for calls such as `emit_encrypted_log`: +This is a snippet of our Schnorr Account contract implementation, which uses Schnorr signatures for authentication: -#include_code encrypted /noir-projects/aztec-nr/address-note/src/address_note.nr rust +#include_code entrypoint /noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr rust -:::info -In order to be able to provide the public encryption key for a given address, that public key needs to have been registered in advance. At the moment, there is no broadcasting mechanism for public keys, which means that you will need to manually register all addresses you intend to send encrypted notes to. You can do this via the `registerRecipient` method of the Private Execution Environment (PXE), callable either via aztec.js or the CLI. -Note that any accounts you own that have been added to the PXE are automatically registered. -::: +Still, different accounts may use different signing schemes, may require multi-factor authentication, or _may not even use signing keys_ and instead rely on other authentication mechanisms. Read [how to write an account contract](/tutorials/tutorials/write_accounts_contract.md) for a full example of how to manage authentication. -### Nullifier secrets +Furthermore, and since signatures are fully abstracted, how the key is stored in the contract is abstracted as well and left to the developer of the account contract. +In the following section, we describe a few ways an account contract could be architected to store signing keys. -In addition to deriving encryption keys, the privacy master key is used for deriving nullifier secrets. Whenever a private note is consumed, a nullifier deterministically derived from it is emitted. This mechanisms prevents double-spends, since nullifiers are checked by the protocol to be unique. Now, in order to preserve privacy, a third party should not be able to link a note commitment to its nullifier - this link is enforced by the note implementation. Therefore, calculating the nullifier for a note requires a secret from its owner. +### Storing signing keys -An application in Aztec.nr can request a secret from the current user for computing the nullifier of a note via the `request_nullifier_secret_key` api: +#### Using a private note -#include_code nullifier /noir-projects/aztec-nr/value-note/src/value_note.nr rust +Storing the signing public key in a private note makes it accessible from the entrypoint function, which is required to be a private function, and allows for rotating the key when needed. However, keep in mind that reading a private note requires nullifying it to ensure it is up to date, so each transaction you send will destroy and recreate the public key. This has the side effect of enforcing a strict ordering across all transactions, since each transaction will refer to the instantiation of the private note from the previous one. -### Scoped keys +#### Using an immutable private note -:::warning -Keys are not yet scoped at the time of this writing. This will be implemented in a future release. -::: +Similar to using a private note, but using an immutable private note removes the need to nullify the note on every read. This generates fewer nullifiers and commitments per transaction, and does not enforce an order across transactions. However, it does not allow the user to rotate their key should they lose it.
+ +#### Using shared state + A compromise between the two solutions above is to use [shared state](/reference/reference/smart_contract_reference/storage/shared_state.md). This would not generate additional nullifiers and commitments for each transaction while allowing the user to rotate their key. However, this causes every transaction to now have a time-to-live determined by the frequency of the mutable shared state, as well as imposing restrictions on how fast keys can be rotated due to minimum delays. -Even though they are all derived from the same privacy master key, all encryption and nullifier keys are scoped to the contract that requests them. This means that the encryption key used for the same user in two different application contracts will be different. The same applies to nullifier secrets. +#### Reusing some of the in-protocol keys -This allows per-application auditability. A user may choose to disclose their inbound and outbound encryption keys for a given application to an auditor or regulator, as a means to reveal all their activity within that context, while retaining privacy across all other applications in the network. +It is possible to use some of the key pairs defined in the protocol (e.g. incoming viewing keys) as the signing key. +Since this key is part of the address preimage (more on this in the [complete address](#complete-address) section below), it can be validated against the account contract address rather than having to store it. +However, this approach is not recommended since it reduces the security of the user's account. -In the case of nullifier secrets, there is also a security reason involved. Since the nullifier secret is exposed in plain text to the application contract, the contract may accidentally or maliciously leak it. If that happens, only the nullifier secret for that application is compromised. +#### Using a separate keystore -### Security considerations +Since there are no restrictions on the actions that an account contract may execute for authenticating a transaction (as long as these are all private function executions), the signing public keys can be stored in a [separate keystore contract](https://vitalik.ca/general/2023/06/09/three_transitions.html) that is checked on every call. This will incur a higher proving time for each transaction, but has no additional cost in terms of fees, and allows for easier key management in a centralized contract. -A leaked privacy master key means a loss of privacy for the affected user. An attacker who holds the privacy private key of a user can derive the encryption private keys to decrypt all past inbound and outbound private notes, and can derive the nullifier secrets to determine when these notes were consumed. +### Complete address -Nevertheless, the attacker cannot steal the affected user's funds, since authentication and access control depend on the signing keys and are managed by the user's account contract. +When deploying a contract, the contract address is deterministically derived using the following scheme: + -:::info -Note that, in the current architecture, the user's wallet needs direct access to the privacy private key, since the wallet needs to use this key for attempting decryption of all notes potentially sent to the user. This means that the privacy private key cannot be stored in a hardware wallet or hardware security module, since the wallet software uses the private key material directly. This may change in future versions in order to enhance security. +``` +partial_address := poseidon2("az_contract_partial_address_v1", contract_class_id, salted_initialization_hash) +public_keys_hash := poseidon2("az_public_keys_hash", Npk_m, Ivpk_m, Ovpk_m, Tpk_m) +address := poseidon2("az_contract_address_v1", public_keys_hash, partial_address) +``` + +Typically, the public keys will be non-zero for account contracts and zero for non-account contracts. +An example of a non-account contract which would have some of the keys non-zero is an escrow contract. +An escrow contract is a type of contract which is itself an "owner" of a note, meaning that it has an `Npk_m` registered and the notes contain this `Npk_m`. +Participants in this escrow contract would then somehow get hold of the escrow's `nsk_m` and nullify the notes based on the logic of the escrow. +An example of an escrow contract is a betting contract. In this scenario, both parties involved in the bet would be aware of the escrow's `nsk_m`. +The escrow would then release the reward only to the party that provides a "proof of winning". + +Because of the contract address derivation scheme, it is possible to check that a given set of public keys corresponds to a given address just by trying to recompute it. +Since this commonly needs to be done when sending a note to an account whose keys are not yet registered in the key registry contract, we coined the term **complete address** for the collection of: + +1. all the user's public keys, +2. the partial address, +3. the contract address. + +Once the complete address is shared with the sender, the sender can check that the address was correctly derived from the public keys and partial address and then send the notes to that address (a sketch of this check is shown below). +Because of this, it is possible to send a note to an account whose account contract has not yet been deployed.
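To illustrate the recomputation check mentioned above, here is a minimal sketch. The `poseidon2` helper is an assumed stand-in; the real implementation (e.g. the `CompleteAddress` class in `circuits.js`) performs the equivalent check with the protocol's actual hash and encodings:

```typescript
// Sketch of verifying a complete address by recomputation (assumed poseidon2 helper).
type Field = bigint;
declare function poseidon2(domainSeparator: string, inputs: Field[]): Field;

interface CompleteAddress {
  publicKeys: { npkM: Field; ivpkM: Field; ovpkM: Field; tpkM: Field };
  partialAddress: Field;
  address: Field;
}

// Returns true iff `address` was correctly derived from the keys and partial address.
function isValidCompleteAddress(ca: CompleteAddress): boolean {
  const { npkM, ivpkM, ovpkM, tpkM } = ca.publicKeys;
  const publicKeysHash = poseidon2("az_public_keys_hash", [npkM, ivpkM, ovpkM, tpkM]);
  const recomputed = poseidon2("az_contract_address_v1", [publicKeysHash, ca.partialAddress]);
  return recomputed === ca.address;
}
```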
+:::note +Note that since the individual [keys can be rotated](#key-rotation), the complete address is used only for non-registered accounts. +For registered accounts, the key registry is always the source of truth. ::: diff --git a/docs/docs/aztec/aztec/concepts/index.md b/docs/docs/aztec/aztec/concepts/index.md index 690f2107ec6..dd91d9b5243 100644 --- a/docs/docs/aztec/aztec/concepts/index.md +++ b/docs/docs/aztec/aztec/concepts/index.md @@ -29,7 +29,7 @@ Private state works with UTXOs, or what we call notes. To keep things private, e Public state works similarly to other chains like Ethereum, behaving like a public ledger. Public data is stored in a [public data tree](/aztec/aztec/concepts/storage/trees/index.md#public-state-tree). -Aztec [smart contract](/aztec/aztec/concepts/smart_contracts/index.md) developers should keep in mind that different types are used when manipulating private or public state. Working with private state is creating commitments and nullifiers to state, whereas working with public state is directly updating state. +Aztec [smart contract](/aztec/aztec/concepts/smart_contracts/index.md) developers should keep in mind that different types are used when manipulating private or public state. Working with private state is creating commitments and nullifiers to state, whereas working with public state is directly updating state. ## Accounts @@ -51,7 +51,7 @@ Aztec allows private communications with Ethereum - ie no-one knows where the tr This is achieved through portals - these are smart contracts deployed on an EVM that are related to the Ethereum smart contract you want to interact with. -Learn more about portals [here](/aztec/aztec/concepts/smart_contracts/communication/cross_chain_calls).
+Learn more about portals [here](/protocol-specs/l1-smart-contracts/index.md). ## Circuits @@ -72,4 +72,3 @@ Explore the Concepts for a deeper understanding into the components that make up ### Start coding Follow the [developer getting started guide](/getting_started/getting_started). - diff --git a/docs/docs/aztec/aztec/concepts/smart_contracts/communication/cross_chain_calls.md b/docs/docs/aztec/aztec/concepts/smart_contracts/communication/cross_chain_calls.md deleted file mode 100644 index f59fa89139b..00000000000 --- a/docs/docs/aztec/aztec/concepts/smart_contracts/communication/cross_chain_calls.md +++ /dev/null @@ -1,190 +0,0 @@ ---- -title: L1 <--> L2 communication ---- - -import Image from "@theme/IdealImage"; - -import Disclaimer from "../../../../../../src/components/Disclaimers/\_wip_disclaimer.mdx"; - - - -In Aztec, what we call _portals_ are the key element in facilitating communication between L1 and L2. While typical L2 solutions rely on synchronous communication with L1, Aztec's privacy-first nature means this is not possible. You can learn more about why in the previous section. - -Traditional L1 \<-\> L2 communication might involve direct calls between L2 nd L1 contracts. However, in Aztec, due to the privacy components and the way transactions are processed (kernel proofs built on historical data), direct calls between L1 and L2 would not be possible if we want to maintain privacy. - -Portals are the solution to this problem, acting as bridges for communication between the two layers. These portals can transmit messages from public functions in L1 to private functions in L2 and vice versa, thus enabling messaging while maintaining privacy. - -This page covers: - -- How portals enable privacy communication between L1 and L2 -- How messages are sent, received, and processed -- Message Boxes and how they work -- How and why linking of contracts between L1 and L2 occurs - -# Objective - -The goal is to set up a minimal-complexity mechanism, that will allow a base-layer (L1) and the Aztec Network (L2) to communicate arbitrary messages such that: - -- L2 functions can `call` L1 functions. -- L1 functions can `call` L2 functions. -- The rollup-block size have a limited impact by the messages and their size. - -# High Level Overview - -This document will contain communication abstractions that we use to support interaction between _private_ functions, _public_ functions and Layer 1 portal contracts. - -Fundamental restrictions for Aztec: - -- L1 and L2 have very different execution environments, stuff that is cheap on L1 is most often expensive on L2 and vice versa. As an example, `keccak256` is cheap on L1, but very expensive on L2. -- L1 and L2 have causal ordering, simply meaning that we cannot execute something on L1 that depends on something happening on L2 and vice versa. -- _Private_ function calls are fully "prepared" and proven by the user, which provides the kernel proof along with commitments and nullifiers to the sequencer. -- _Public_ functions altering public state (updatable storage) must be executed at the current "head" of the chain, which only the sequencer can ensure, so these must be executed separately to the _private_ functions. -- _Private_ and _public_ functions within Aztec are therefore ordered such that first _private_ functions are executed, and then _public_. For a more detailed description of why, see above. -- There is an **explicit 1:1 link** from a L2 contract to an L1 contract, and only the messages between a pair is allowed. 
See [Portal](#portal) for more information. -- Messages are consumables, and can only be consumed by the recipient. See [Message Boxes](#message-boxes) for more information. - -With the aforementioned restrictions taken into account, cross-chain messages can be operated in a similar manner to when _public_ functions must transmit information to _private_ functions. In such a scenario, a "message" is created and conveyed to the recipient for future use. It is worth noting that any call made between different domains (_private, public, cross-chain_) is unilateral in nature. In other words, the caller is unaware of the outcome of the initiated call until told when some later rollup is executed (if at all). This can be regarded as message passing, providing us with a consistent mental model across all domains, which is convenient. - -As an illustration, suppose a private function adds a cross-chain call. In such a case, the private function would not have knowledge of the result of the cross-chain call within the same rollup (since it has yet to be executed). - -Similarly to the ordering of private and public functions, we can also reap the benefits of intentionally ordering messages between L1 and L2. When a message is sent from L1 to L2, it has been "emitted" by an action in the past (an L1 interaction), allowing us to add it to the list of consumables at the "beginning" of the block execution. This practical approach means that a message could be consumed in the same block it is included. In a sophisticated setup, rollup $n$ could send an L2 to L1 message that is then consumed on L1, and the response is added already in $n+1$. However, messages going from L2 to L1 will be added as they are emitted. - -:::info -Because everything is unilateral and async, the application developer have to explicitly handle failure cases such that user can gracefully recover. Example where recovering is of utmost importance is token bridges, where it is very inconvenient if the locking of funds on one domain occur, but never the minting or unlocking on the other. -::: - -## Components - -### Portal - -A "portal" refers to the part of an application residing on L1, which is associated with a particular L2 address (the confidential part of the application). The link between them is established explicitly to reduce access control complexity. On public chains, access control information such as a whitelist in a mapping or similar data structure can simply be placed in public storage. However, this is not feasible for contracts in Aztec. Recall that public storage can only be accessed (up to date) by public functions which are called AFTER the private functions. This implies that access control values in public storage only work for public functions. One possible workaround is to store them in private data, but this is not always practical for generic token bridges and other similar use cases where the values must be publicly known to ensure that the system remains operational. Instead, we chose to use a hard link between the portal and the L2 address. - -:::info -Note, that we at no point require the "portal" to be a contract, it could be an EOA on L1. -::: - -### Message Boxes - -In a logical sense, a Message Box functions as a one-way message passing mechanism with two ends, one residing on each side of the divide, i.e., one component on L1 and another on L2. Essentially, these boxes are utilized to transmit messages between L1 and L2 via the rollup contract. 
The boxes can be envisaged as multi-sets that enable the same message to be inserted numerous times, a feature that is necessary to accommodate scenarios where, for instance, "deposit 10 eth to A" is required multiple times. The diagram below provides a detailed illustration of how one can perceive a message box in a logical context. - - - -- Here, a `sender` will insert a message into the `pending` set, the specific constraints of the actions depend on the implementation domain, but for now, say that anyone can insert into the pending set. -- At some point, a rollup will be executed, in this step messages are "moved" from pending on Domain A, to ready on Domain B. Note that consuming the message is "pulling & deleting" (or nullifying). The action is atomic, so a message that is consumed from the pending set MUST be added to the ready set, or the state transition should fail. A further constraint on moving messages along the way, is that only messages where the `sender` and `recipient` pair exists in a leaf in the contracts tree are allowed! -- When the message have been added to the ready set, the `recipient` can consume the message as part of a function call. - -Something that might seem weird when compared to other cross-chain setups, is that we are "pulling" messages, and that the message don't need to be calldata for a function call. For _Arbitrum_ and the like, execution is happening FROM the "message bridge", which then calls the L1 contract. For us, you call the L1 contract, and it should then consume messages from the message box. -Why? _Privacy_! When pushing, we would be needing full `calldata`. Which for functions with private inputs is not really something we want as that calldata for L1 -> L2 transactions are committed to on L1, e.g., publicly sharing the inputs to a private function. - -By instead pulling, we can have the "message" be something that is derived from the arguments instead. This way, a private function to perform second half of a deposit, could leak the "value" deposited and "who" made the deposit (as this is done on L1), but the new owner can be hidden on L2. - -To support messages in both directions we logically require two of these message boxes (one in each direction), and then message passing between L1 and L2 is supported! However, due to the limitations of each domain, the message box for sending messages into the rollup and sending messages out are not fully symmetrical. In reality, the setup looks closer to the following: - - - -:::info -The L2 -> L1 pending messages set only exist logically, as it is practically unnecessary. For anything to happen to the L2 state (e.g., update the pending messages), the state will be updated on L1, meaning that we could just as well insert the messages directly into the ready set. -::: - -### Rollup Contract - -The rollup contract has a few very important responsibilities. The contract must keep track of the _L2 rollup state root_, perform _state transitions_ and ensure that the data is available for anyone else to synchronize to the current state. - -To ensure that _state transitions_ are performed correctly, the contract will derive public inputs for the **rollup circuit** based on the input data, and then use a _verifier_ contract to validate that inputs correctly transition the current state to the next. All data needed for the public inputs to the circuit must be from the rollup block, ensuring that the block is available. 
For a valid proof, the _rollup state root_ is updated and it will emit an _event_ to make it easy for anyone to find the data by event spotting. - -As part of _state transitions_ where cross-chain messages are included, the contract must "move" messages along the way, e.g., from "pending" to "ready". - -### Kernel Circuit - -For L2 to L1 messages, the public inputs of a user-proof will contain a dynamic array of messages to be added, of size at most `MAX_MESSAGESTACK_DEPTH`, limited to ensure it is not impossible to include the transaction. The circuit must ensure, that all messages have a `sender/recipient` pair, and that those pairs exist in the contracts tree and that the `sender` is the L2 contract that actually emitted the message. -For consuming L1 to L2 messages the circuit must create proper nullifiers. - -### Rollup Circuit - -The rollup circuit must ensure that, provided two states $S$ and $S'$ and the rollup block $B$, applying $B$ to $S$ using the transition function must give us $S'$, e.g., $T(S, B) \mapsto S'$. If this is not the case, the constraints are not satisfied. - -For the sake of cross-chain messages, this means inserting and nullifying L1 $\rightarrow$ L2 in the trees, and publish L2 $\rightarrow$ L1 messages on chain. These messages should only be inserted if the `sender` and `recipient` match an entry in the contracts leaf (as checked by the kernel). - -### Messages - -While a message could theoretically be arbitrarily long, we want to limit the cost of the insertion on L1 as much as possible. Therefore, we allow the users to send 32 bytes of "content" between L1 and L2. If 32 suffices, no packing required. If the 32 is too "small" for the message directly, the sender should simply pass along a `sha256(content)` instead of the content directly (note that this hash should fit in a field element which is ~254 bits. More info on this below). The content can then either be emitted as an event on L2 or kept by the sender, who should then be the only entity that can "unpack" the message. -In this manner, there is some way to "unpack" the content on the receiving domain. - -The message that is passed along, require the `sender/recipient` pair to be communicated as well (we need to know who should receive the message and be able to check). By having the pending messages be a contract on L1, we can ensure that the `sender = msg.sender` and let only `content` and `recipient` be provided by the caller. Summing up, we can use the struct's seen below, and only store the commitment (`sha256(LxToLyMsg)`) on chain or in the trees, this way, we need only update a single storage slot per message. - -```solidity -struct L1Actor { - address: actor, - uint256: chainId, -} - -struct L2Actor { - bytes32: actor, - uint256: version, -} - -struct L1ToL2Msg { - L1Actor: sender, - L2Actor: recipient, - bytes32: content, - bytes32: secretHash, -} - -struct L2ToL1Msg { - L2Actor: sender, - L1Actor: recipient, - bytes32: content, -} -``` - -:::info -The `bytes32` elements for `content` and `secretHash` hold values that must fit in a field element (~ 254 bits). -::: - -:::info -The nullifier computation should include the index of the message in the message tree to ensure that it is possible to send duplicate messages (e.g., 2 x deposit of 500 dai to the same account). - -To make it possible to hide when a specific message is consumed, the `L1ToL2Msg` is extended with a `secretHash` field, where the `secretPreimage` is used as part of the nullifier computation. 
This way, it is not possible for someone just seeing the `L1ToL2Msg` on L1 to know when it is consumed on L2. -::: - -## Combined Architecture - -The following diagram shows the overall architecture, combining the earlier sections. - - - -## Linking L1 and L2 contracts - -As mentioned earlier, there will be a link between L1 and L2 contracts (with the L1 part of the link being the portal contract), this link is created at "birth" when the contract leaf is inserted. However, the specific requirements of the link is not yet fully decided. And we will outline a few options below. - -The reasoning behind having a link, comes from the difficulty of L2 access control (see "A note on L2 access control"). By having a link that only allows 1 contract (specified at deployment) to send messages to the L2 contract makes this issue "go away" from the application developers point of view as the message could only come from the specified contract. The complexity is moved to the protocol layer, which must now ensure that messages to the L2 contract are only sent from the specified L1 contract. - -:::info -The design space for linking L1 and L2 contracts is still open, and we are looking into making access control more efficient to use in the models. -::: - -### One L2 contract linking to one L1 - -One option is to have a 1:1 link between L1 and L2 contracts. This would mean that the L2 contract would only be able to receive messages from the specified L1 contract but also that the L1 should only be able to send messages to the specified L2 contract. This model is very restrictive, but makes access control easy to handle (but with no freedom). - -It is possible to model many-to-many relationships through implementing "relays" and listing those. However, L2 contracts that want to use the relay would have to either use dynamic access control to ensure that messages are coming from the relayer and that they where indeed relayed from the correct L1 contract. Essentially back in a similar case to no links. - -To enforce the restriction, the circuit must ensure that neither of the contracts have been used in any other links. Something that in itself gives us a few problems on frontrunning, but could be mitigated with a handshake between the L1 and L2 contract. - -### Many L2 contracts linking to one L1 - -From the L2 contract receiving messages, this model is very similar to the 1:1, only one L1 contract could be the sender of messages so no extra work needed there. On the L1 side of things, as many L2 could be sending messages to the L1 contract, we need to be able to verify that the message is coming from the correct L2 contract. However, this can be done using easy access control in the form of storage variables on the L1 contract, moving the design-space back to something that closely resembles multi-contract systems on L1. - -When the L1 contract can itself handle where messages are coming from (it could before as well but useless as only 1 address could send), we don't need to worry about it being in only a single pair. The circuits can therefore simply insert the contract leafs without requiring it to ensure that neither have been used before. - -With many L2's reading from the same L1, we can also more easily setup generic bridges (with many assets) living in a single L1 contract but minting multiple L2 assets, as the L1 contract can handle the access control and the L2's simply point to it as the portal. This reduces the complexity of the L2 contracts as all access control is handled by the L1 contract. 
- ## Open Questions - - Can we handle L2 access control without public function calls? - - Essentially, can we have "private shared state" that is updated very sparingly but where we accept the race-conditions as they are desired in specific instances. - What is the best way to handle "linking", with efficient access control, could use this directly. - What is the best way to handle messages in a multi-rollup system? E.g., rollup upgrade is rejected by some part of users that use the old rollup. - - What happens to pending messages (sent on old system then upgrade)? - - Should both versions push messages into same message boxes? - - How should users or developers signal what versions their contracts respects as the "current" version? diff --git a/docs/docs/aztec/aztec/concepts/smart_contracts/communication/index.md b/docs/docs/aztec/aztec/concepts/smart_contracts/communication/index.md index 3687c9e0374..4172cdea96d 100644 --- a/docs/docs/aztec/aztec/concepts/smart_contracts/communication/index.md +++ b/docs/docs/aztec/aztec/concepts/smart_contracts/communication/index.md @@ -3,8 +3,10 @@ title: Contract Communication sidebar_position: 5 --- -This section will walk over communication types that behaves differently than normal function calls. +## Private / Public execution -Namely, if functions are in different domains, private vs. public, their execution behaves a little differently to what you might expect! See [Private \<--\> Public execution](./public_private_calls.md). +See [Private \<--\> Public execution](./public_private_calls.md) for information about how Aztec contracts pass information between these execution contexts. -Likewise, executing a function on a different domain than its origin needs a bit extra thought. See [L1 \<--\> L2 communication](./cross_chain_calls.md). +## Cross-chain communication + +Read the protocol specification about cross-chain communication between Aztec and Ethereum [here](../../../../../protocol-specs/l1-smart-contracts/index.md). diff --git a/docs/docs/aztec/aztec/concepts/smart_contracts/functions/context.md b/docs/docs/aztec/aztec/concepts/smart_contracts/functions/context.md index 6bbd70e287c..a2d44f40d80 100644 --- a/docs/docs/aztec/aztec/concepts/smart_contracts/functions/context.md +++ b/docs/docs/aztec/aztec/concepts/smart_contracts/functions/context.md @@ -126,7 +126,7 @@ The public call stack contains all of the external function calls that are creat ### New L2 to L1 msgs -New L2 to L1 messages contains messages that are delivered to the [l1 outbox](/aztec/aztec/concepts/smart_contracts/communication/cross_chain_calls) on the execution of each rollup. +New L2 to L1 messages contain messages that are delivered to the [l1 outbox](/protocol-specs/l1-smart-contracts/index.md) on the execution of each rollup. ## Public Context diff --git a/docs/docs/aztec/aztec/core_components.md b/docs/docs/aztec/aztec/core_components.md index f4a88fbc3e2..acaa0857820 100644 --- a/docs/docs/aztec/aztec/core_components.md +++ b/docs/docs/aztec/aztec/core_components.md @@ -1,6 +1,6 @@ --- title: Core Components -sidebar_position: 2 +sidebar_position: 1 --- Aztec Labs is building a layer 2 rollup on Ethereum focused on 3 things: @@ -39,7 +39,7 @@ A user of the Aztec network will interact with the network through Aztec.js. Azt ### Private Execution Environment -The PXE provides a secure environment for the execution of sensitive operations, ensuring private information and decrypted data are not accessible to unauthorized applications.
It hides the details of the [state model](/aztec/aztec/concepts/state_model/index.md) from end users, but the state model is important for Aztec developers to understand as it has implications for [private/public execution](/aztec/aztec/concepts/smart_contracts/communication/public_private_calls.md) and [L1/L2 communication](/aztec/aztec/concepts/smart_contracts/communication/cross_chain_calls). The PXE also includes the [ACIR Simulator](/aztec/aztec/concepts/pxe/acir_simulator.md) for private executions and the KeyStore for secure key management. +The PXE provides a secure environment for the execution of sensitive operations, ensuring private information and decrypted data are not accessible to unauthorized applications. It hides the details of the [state model](/aztec/aztec/concepts/state_model/index.md) from end users, but the state model is important for Aztec developers to understand as it has implications for [private/public execution](/aztec/aztec/concepts/smart_contracts/communication/public_private_calls.md) and [L1/L2 communication](../../protocol-specs/l1-smart-contracts/index.md). The PXE also includes the [ACIR Simulator](/aztec/aztec/concepts/pxe/acir_simulator.md) for private executions and the KeyStore for secure key management. Procedurally, the PXE sends results of private function execution and requests for public function executions to the [sequencer](/aztec/aztec/concepts/nodes_clients/sequencer), which will update the state of the rollup. diff --git a/docs/docs/guides/guides/smart_contracts/writing_contracts/how_to_emit_event.md b/docs/docs/guides/guides/smart_contracts/writing_contracts/how_to_emit_event.md index 74f72772787..913497e3249 100644 --- a/docs/docs/guides/guides/smart_contracts/writing_contracts/how_to_emit_event.md +++ b/docs/docs/guides/guides/smart_contracts/writing_contracts/how_to_emit_event.md @@ -19,26 +19,15 @@ Unlike on Ethereum, there are 2 types of events supported by Aztec: [encrypted]( Encrypted events can only be emitted by private functions and are encrypted using a public key of a recipient. For this reason it is necessary to register a recipient in the Private Execution Environment (PXE) before encrypting the events for them. -Recipients can be registered using Aztec.js: - -```ts -const aztecAddress = AztecAddress.fromString( - "0x147392a39e593189902458f4303bc6e0a39128c5a1c1612f76527a162d36d529" -); -const publicKey = Point.fromString( - "0x26e193aef4f83c70651485b5526c6d01a36d763223ab24efd1f9ff91b394ac0c20ad99d0ef669dc0dde8d5f5996c63105de8e15c2c87d8260b9e6f02f72af622" -); -const partialAddress = Fr.fromString( - "0x200e9a6c2d2e8352012e51c6637659713d336405c29386c7c4ac56779ab54fa7" -); - -const completeAddress = new CompleteAddress( - aztecAddress, - publicKey, - partialKey -); -await pxe.registerRecipient(completeAddress); -``` + +First, we need to get hold of the recipient's [complete address](/aztec/aztec/concepts/accounts/keys.md#complete-address).
+Below are some ways we could instantiate it after receiving the information in string form from a recipient: + +#include_code instantiate-complete-address /yarn-project/circuits.js/src/structs/complete_address.test.ts rust + +Then, to register the recipient's [complete address](/aztec/aztec/concepts/accounts/keys.md#complete-address) in the PXE, we would call the `registerRecipient` PXE endpoint using [Aztec.js](/aztec/aztec/core_components.md#aztecjs): + +#include_code register-recipient /yarn-project/aztec.js/src/wallet/create_recipient.ts rust :::info If a note recipient is one of the accounts inside the PXE, we don't need to register it as a recipient because we already have the public key available. You can register a recipient as shown [here](../how_to_deploy_contract.md) diff --git a/docs/docs/guides/guides/smart_contracts/writing_contracts/portals/communicate_with_portal.md b/docs/docs/guides/guides/smart_contracts/writing_contracts/portals/communicate_with_portal.md index 5cb71774cfe..31ef4735f7c 100644 --- a/docs/docs/guides/guides/smart_contracts/writing_contracts/portals/communicate_with_portal.md +++ b/docs/docs/guides/guides/smart_contracts/writing_contracts/portals/communicate_with_portal.md @@ -2,7 +2,7 @@ title: Communicating with L1 --- -Is this your first time hearing the word `Portal`? Check out the [concepts section](/aztec/aztec/concepts/smart_contracts/communication/cross_chain_calls). +Is this your first time hearing the word `Portal`? Check out the [concepts section](../../../../../protocol-specs/l1-smart-contracts/index.md). Follow the [token bridge tutorial](/tutorials/tutorials/contract_tutorials/advanced/token_bridge) for hands-on experience writing and deploying a Portal contract. @@ -14,18 +14,18 @@ The `Inbox` can be seen as a mailbox to the rollup, portals put messages into th When sending messages, we need to specify quite a bit of information beyond just the content that we are sharing. Namely we need to specify: -| Name | Type | Description | -| ----------- | ------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| Recipient | `L2Actor` | The message recipient. This **MUST** match the rollup version and an Aztec contract that is **attached** to the contract making this call. If the recipient is not attached to the caller, the message cannot be consumed by it. | -| Secret Hash | `field` (~254 bits) | A hash of a secret that is used when consuming the message on L2. Keep this preimage a secret to make the consumption private. To consume the message the caller must know the pre-image (the value that was hashed) - so make sure your app keeps track of the pre-images! Use [`computeSecretHash`](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/aztec.js/src/utils/secrets.ts) to compute it from a secret. | -| Content | `field` (~254 bits) | The content of the message. This is the data that will be passed to the recipient. The content is limited to be a single field. If the content is small enough it can just be passed along, otherwise it should be hashed and the hash passed along (you can use our [`Hash`](https://github.com/AztecProtocol/aztec-packages/blob/master/l1-contracts/src/core/libraries/Hash.sol) utilities with `sha256ToField` functions) +| Name | Type | Description | +| ----------- | ------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| Recipient | `L2Actor` | The message recipient. This **MUST** match the rollup version and an Aztec contract that is **attached** to the contract making this call. If the recipient is not attached to the caller, the message cannot be consumed by it. | +| Secret Hash | `field` (~254 bits) | A hash of a secret that is used when consuming the message on L2. Keep this preimage a secret to make the consumption private. To consume the message the caller must know the pre-image (the value that was hashed) - so make sure your app keeps track of the pre-images! Use `computeSecretHash` to compute it from a secret. | +| Content | `field` (~254 bits) | The content of the message. This is the data that will be passed to the recipient. The content is limited to be a single field. If the content is small enough it can just be passed along, otherwise it should be hashed and the hash passed along (you can use our [`Hash`](https://github.com/AztecProtocol/aztec-packages/blob/master/l1-contracts/src/core/libraries/Hash.sol) utilities with `sha256ToField` functions) |
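For instance, generating a secret and its hash with Aztec.js might look like the following sketch (exact import paths can differ between versions):

```typescript
import { Fr, computeSecretHash } from "@aztec/aztec.js";

// Generate a random secret and derive the hash that goes into the L1 -> L2 message.
// Keep `secret` around: it is needed later to consume the message on L2.
const secret = Fr.random();
const secretHash = computeSecretHash(secret);
```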
With all that information at hand, we can call the `sendL2Message` function on the Inbox. The function will return a `field` (inside `bytes32`) that is the hash of the message. This hash can be used as an identifier to spot when your message has been included in a rollup block. #include_code send_l1_to_l2_message l1-contracts/src/core/interfaces/messagebridge/IInbox.sol solidity -As time passes, a sequencer will consume the message batch your message was included in and include it in a their block. -Upon inclusion, it is made available to be consumed on L2. +A sequencer will consume the message batch your message was included in and include it in their block. +Upon inclusion, it is made available to be consumed on L2 via the L2 outbox. To consume the message, we can use the `consume_l1_to_l2_message` function within the `context` struct. @@ -40,7 +40,9 @@ Note that while the `secret` and the `content` are both hashed, they are actuall #include_code context_consume_l1_to_l2_message /noir-projects/aztec-nr/aztec/src/context/private_context.nr rust -Computing the `content` must be done manually in its current form, as we are still adding a number of bytes utilities. A good example exists within the [Token bridge example](https://github.com/AztecProtocol/aztec-packages/blob/master/noir-projects/noir-contracts/contracts/token_bridge_contract/src/util.nr). +### Token bridge example + +Computing the `content` must currently be done manually, as we are still adding a number of bytes utilities. A good example exists within the [Token bridge example](../../../../../tutorials/tutorials/contract_tutorials/advanced/token_bridge/minting_on_aztec.md#consume-the-l1-message).
#include_code claim_public /noir-projects/noir-contracts/contracts/token_bridge_contract/src/main.nr rust @@ -48,8 +50,12 @@ Computing the `content` must be done manually in its current form, as we are sti The `content_hash` is a sha256 truncated to a field element (~ 254 bits). In Aztec-nr, you can use our `sha256_to_field()` to do a sha256 hash which fits in one field element ::: +### Token portal hash library + #include_code mint_public_content_hash_nr /noir-projects/noir-contracts/contracts/token_portal_content_hash_lib/src/lib.nr rust +### Token Portal contract + In Solidity, you can use our `Hash.sha256ToField()` method: #include_code content_hash_sol_import l1-contracts/test/portals/TokenPortal.sol solidity @@ -60,17 +66,19 @@ The `secret_hash` uses the pederson hash which fits in a field element. You can After the transaction has been mined, the message is consumed, a nullifier is emitted and the tokens have been minted on Aztec and are ready for claiming. -Since the message consumption is emitting a nullifier the same message cannot be consumed again. The index in the message tree is used as part of the nullifier computation, ensuring that the same content and secret being inserted will be distinct messages that can each be consumed. Without the index in the nullifier, it would be possible to perform a kind of attack known as `Faerie Gold` attacks where two seemingly good messages are inserted, but only one of them can be consumed later. +Since the message consumption is emitting a nullifier, the same message cannot be consumed again. The index in the message tree is used as part of the nullifier computation, ensuring that the same content and secret being inserted will be distinct messages that can each be consumed. Without the index in the nullifier, it would be possible to perform a kind of attack known as `Faerie Gold` attacks where two seemingly good messages are inserted, but only one of them can be consumed later. ## Passing data to L1 To pass data to L1, we use the `Outbox`. The `Outbox` is the mailbox for L2 to L1 messages. This is the location on L1 where all the messages from L2 will live, and where they can be consumed from. -Similarly to messages going to L2 from L1, a message can only be consumed by the recipient, however note that it is up to the portal contract to ensure that the sender is as expected! +:::danger + +Similarly to messages going to L2 from L1, a message can only be consumed by the specified recipient. But it is up to the portal contract to ensure that the sender is as expected! Any L2 contract can send a message to a portal contract on L1, but the portal contract should only consume messages from the expected sender. -Recall that we mentioned the Aztec contract specifies what portal it is attached to at deployment. This value is stored in the rollup's contract tree, hence these links are not directly readable on L1. Also, it is possible to attach multiple aztec contracts to the same portal. +::: -The portal must ensure that the sender is as expected. One way to do this is to compute the addresses before deployment and store them as constants in the contract. However, a more flexible solution is to have an `initialize` function in the portal contract which can be used to set the address of the Aztec contract. In this model, the portal contract can check that the sender matches the value it has in storage. +The portal must ensure that the sender is as expected. 
One flexible solution is to have an `initialize` function in the portal contract which can be used to set the address of the Aztec contract. In this model, the portal contract can check that the sender matches the value it has in storage. To send a message to L1 from your Aztec contract, you must use the `message_portal` function on the `context`. When messaging to L1, only the `content` is required (as a `Field`). @@ -82,17 +90,25 @@ When sending a message from L2 to L1 we don't need to pass in a secret. Access control on the L1 portal contract is essential to prevent consumption of messages sent from the wrong L2 contract. ::: +### Token bridge + As earlier, we can use a token bridge as an example. In this case, we are burning tokens on L2 and sending a message to the portal to free them on L1. #include_code exit_to_l1_private noir-projects/noir-contracts/contracts/token_bridge_contract/src/main.nr rust -When the transaction is included in a rollup block the message will be inserted into the `Outbox`, where the recipient portal can consume it from. When consuming, the `msg.sender` must match the `recipient` meaning that only portal can actually consume the message. +When the transaction is included in a rollup block and published to Ethereum, the message will be inserted into the `Outbox` on Ethereum, where the recipient portal can consume it. When consuming, the `msg.sender` must match the `recipient`, meaning that only the portal can actually consume the message. #include_code l2_to_l1_msg l1-contracts/src/core/libraries/DataStructures.sol solidity +#### Outbox `consume` + #include_code outbox_consume l1-contracts/src/core/interfaces/messagebridge/IOutbox.sol solidity +#### Withdraw + -As noted earlier, the portal contract should check that the sender is as expected. In the example below, we support only one sender contract (stored in `l2TokenAddress`) so we can just pass it as the sender, that way we will only be able to consume messages from that contract. If multiple senders are supported, you could use a have `mapping(address => bool) allowed` and check that `allowed[msg.sender]` is `true`. +As noted earlier, the portal contract should check that the sender is as expected. In the example below, we support only one sender contract (stored in `l2TokenAddress`) so we can just pass it as the sender, that way we will only be able to consume messages from that contract. + +It is possible to support multiple senders from L2. You could have a `mapping(address => bool) allowed` and check that `allowed[msg.sender]` is `true`. #include_code token_portal_withdraw l1-contracts/test/portals/TokenPortal.sol solidity @@ -100,7 +116,7 @@ As noted earlier, the portal contract should check that the sender is as expecte ### Structure of messages -The application developer should consider creating messages that follow a function call structure e.g., using a function signature and arguments. This will make it easier to prevent producing messages that could be misinterpreted by the recipient. +Application developers should consider creating messages that follow a function call structure, e.g. using a function signature and arguments. This will make it easier to prevent producing messages that could be misinterpreted by the recipient. An example of a bad format would be using `amount, token_address, recipient_address` as the message for a withdraw function and `amount, token_address, on_behalf_of_address` for a deposit function. Any deposit could then also be mapped to a withdraw or vice versa.
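As a sketch of why the function-call structure helps, consider encoding the two messages with explicit signatures (shown here with ethers.js for illustration; the function names and argument lists are hypothetical, and the Solidity `abi.encodeWithSignature` equivalent appears below):

```typescript
import { Interface } from "ethers";

// The 4-byte selector baked into each message disambiguates intent:
// a withdraw message can no longer be replayed as a deposit, even with identical arguments.
const iface = new Interface([
  "function withdraw(uint256 amount, address token, address recipient)",
  "function deposit(uint256 amount, address token, address onBehalfOf)",
]);

const amount = 1000n;
const token = "0x0000000000000000000000000000000000000001";
const account = "0x0000000000000000000000000000000000000002";

const withdrawMsg = iface.encodeFunctionData("withdraw", [amount, token, account]);
const depositMsg = iface.encodeFunctionData("deposit", [amount, token, account]);
console.log(withdrawMsg !== depositMsg); // true: the selectors differ
```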
@@ -123,7 +139,7 @@ bytes memory message abi.encodeWithSignature( ### Error Handling -Handling error when moving cross chain can quickly get tricky. Since the L1 and L2 calls are practically async and independent of each other, the L1 part of a deposit might execute just fine, with the L2 part failing. If this is not handled well, the funds may be lost forever! The contract builder should therefore consider ways their application can fail cross chain, and handle all cases explicitly. +Handling errors when moving cross-chain can quickly get tricky. Since the L1 and L2 calls are async and independent of each other, the L1 part of a deposit might execute just fine, with the L2 part failing. If this is not handled well, the funds may be lost forever! Developers should consider ways their application can fail cross-chain, and handle all cases explicitly. First, entries in the outboxes **SHOULD** only be consumed if the execution is successful. For an L2 -> L1 call, the L1 execution can revert the transaction completely if anything fails. As the tx is atomic, the failure also reverts consumption. @@ -135,16 +151,13 @@ Generally it is good practice to keep cross-chain calls simple to avoid too many Error handling for cross chain messages is handled by the application contract and not the protocol. The protocol only delivers the messages, it does not ensure that they are executed successfully. ::: - ### Designated caller Designating a caller grants the ability to specify who should be able to call a function that consumes a message. This is useful for ordering of batched messages. -When performing multiple cross-chain calls in one action it is important to consider the order of the calls. Say for example, that you want to perform a uniswap trade on L1 because you are a whale and slippage on L2 is too damn high. - -You would practically, withdraw funds from the rollup, swap them on L1, and then deposit the swapped funds back into the rollup. This is a fairly simple process, but it requires that the calls are done in the correct order. For one, if the swap is called before the funds are withdrawn, the swap will fail. And if the deposit is called before the swap, the funds might get lost! +When performing multiple cross-chain calls in one action, it is important to consider the order of the calls. Say, for example, that you want to perform a Uniswap trade on L1. You would withdraw funds from the rollup, swap them on L1, and then deposit the swapped funds back into the rollup. This is a straightforward process, but it requires that the calls are done in the correct order (e.g. if the swap is called before the funds are withdrawn, the swap will fail). -As message boxes only will allow the recipient portal to consume the message, we can use this to our advantage to ensure that the calls are done in the correct order. Say that we include a designated "caller" in the messages, and that the portal contract checks that the caller matches the designated caller or designated is address(0) (anyone can call). When the message are to be consumed on L1, it can compute the message as seen below: +The message boxes (Inbox and Outbox) will only allow the recipient portal to consume the message, and we can use this to ensure that the calls are done in the correct order. Say that we include a designated "caller" in the messages, and that the portal contract checks that the caller matches the designated caller, or that the designated caller is `address(0)` (anyone can call).
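A minimal sketch of that check (the `_designatedCaller` variable is a hypothetical name, not the actual portal code):

```solidity
// Anyone may call when no caller was designated; otherwise the caller must match.
require(
    _designatedCaller == address(0) || msg.sender == _designatedCaller,
    "Invalid caller"
);
```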
When the messages are to be consumed on L1, the portal can compute the message as seen below: ```solidity bytes memory message = abi.encodeWithSignature( @@ -155,12 +168,12 @@ bytes memory message = abi.encodeWithSignature( ); ``` -This way, the message can be consumed by the portal contract, but only if the caller is the designated caller. By being a bit clever when specifying the designated caller, we can ensure that the calls are done in the correct order. For the Uniswap example, say that we have token portals implemented as we have done throughout this page, and a Uniswap portal implementing the designated caller. +This way, the message can be consumed by the portal contract, but only if the caller is the specified caller. In the logic of the contract that is the designated caller, we can ensure that the calls are done in the correct order. -We require that the Uniswap portal is the caller of the withdrawal, and that the uniswap portal implementation is executing the withdrawal before the swap. -The order of execution can be constrained in the contract. Since all of the messages are emitted to L1 in the same transaction, we can leverage transaction atomicity to ensure success of failure of all messages. +For example, we could require that the Uniswap portal is the caller of the withdrawal, and ensure that the Uniswap portal contract implementation executes the withdrawal before the swap. +The order of execution can be specified in the contract. Since all of the messages are emitted to L1 in the same transaction, we can leverage transaction atomicity to ensure success or failure of all messages. -Note, that crossing the L1/L2 chasm is asynchronous, so there could be a situation where the user has burned their assets on L2 but the swap fails on L1! This could be due to major price movements or the like. In such a case, the user could be stuck with funds on L1 that they cannot get back to L2 unless the portal contract implements a way to properly handle such errors. +Note that crossing the L1/L2 chasm is asynchronous, so there could be a situation where the user has burned their assets on L2 but the swap fails on L1! This could be due to major price movements, for example. In such a case, the user could be stuck with funds on L1 that they cannot get back to L2 unless the portal contract implements a way to properly handle such errors. :::caution Designated callers are enforced at the contract level for contracts that are not the rollup itself, and should not be trusted to implement the contract correctly. The user should always be aware that it is possible for the developer to implement something that looks like designated caller without providing the abilities to the user.
diff --git a/docs/docs/guides/guides/smart_contracts/writing_contracts/portals/deploy_with_portal.md b/docs/docs/guides/guides/smart_contracts/writing_contracts/portals/deploy_with_portal.md deleted file mode 100644 index a9df5cef142..00000000000 --- a/docs/docs/guides/guides/smart_contracts/writing_contracts/portals/deploy_with_portal.md +++ /dev/null @@ -1,12 +0,0 @@ ---- -title: How to deploy a contract with a Portal ---- - -- Deploy to L1 using Viem, Foundry or your preferred tool; -- Deploy to L2 and supply the L1 portal as an argument so you can store it in the contract; - ```typescript - const deploymentTx = Contract.deploy(wallet, tokenPortalAddress).send(); - ``` -- Initialize l1 with l2 address for access control.
- -Follow the [token bridge tutorial](/tutorials/tutorials/contract_tutorials/advanced/token_bridge) for hands-on experience writing and deploying a Portal contract.
diff --git a/docs/docs/guides/guides/smart_contracts/writing_contracts/portals/index.md b/docs/docs/guides/guides/smart_contracts/writing_contracts/portals/index.md index f5886ebf845..f861c47664a 100644 --- a/docs/docs/guides/guides/smart_contracts/writing_contracts/portals/index.md +++ b/docs/docs/guides/guides/smart_contracts/writing_contracts/portals/index.md @@ -4,4 +4,4 @@ title: Portals A portal is a point of contact between L1 and a contract on Aztec. For applications such as token bridges, this is the point where the tokens are held on L1 while used in L2. -As outlined in [Communication](/aztec/aztec/concepts/smart_contracts/communication/cross_chain_calls), an Aztec L2 contract is linked to _ONE_ L1 address at time of deployment (specified by the developer). This L1 address is the only address that can send messages to that specific L2 contract, and the only address that can receive messages sent from the L2 contract to L1. Note, that a portal doesn't actually need to be a contract, it could be any address on L1. +As outlined in [Communication](../../../../../protocol-specs/l1-smart-contracts/index.md), an Aztec L2 contract does not have to be linked to a portal contract, but can specify an intended portal in storage. Note that a portal doesn't actually need to be a contract; it could be any address on L1.
diff --git a/docs/docs/migration_notes.md b/docs/docs/migration_notes.md index 2873bd1fb97..209e5a0e066 100644 --- a/docs/docs/migration_notes.md +++ b/docs/docs/migration_notes.md @@ -6,9 +6,9 @@ keywords: [sandbox, cli, aztec, notes, migration, updating, upgrading] Aztec is in full-speed development. Literally every version breaks compatibility with the previous ones. This page attempts to target errors and difficulties you might encounter when upgrading, and how to resolve them. -## TBD +## 0.41.0 -### [Aztec.nr] Keys: Token note now stores an owner master nullifying public key hash instead of an owner address. +### [Aztec.nr] Keys: Token note now stores an owner master nullifying public key hash instead of an owner address, i.e. @@ -23,6 +8,8 struct TokenNote \{ Computing the nullifier similarly changes to use this master nullifying public key hash. +## 0.40.0 + ### [Aztec.nr] Debug logging The function `debug_log_array_with_prefix` has been removed. Use `debug_log_format` with `{}` instead. The special sequence `{}` will be replaced with the whole array. You can also use `{0}`, `{1}`, ... as usual with `debug_log_format`.
diff --git a/docs/docs/protocol-specs/contract-deployment/instances.md b/docs/docs/protocol-specs/contract-deployment/instances.md index 84b3048be40..1e104554cc5 100644 --- a/docs/docs/protocol-specs/contract-deployment/instances.md +++ b/docs/docs/protocol-specs/contract-deployment/instances.md @@ -23,7 +23,6 @@ The structure of a contract instance is defined as: | `deployer` | `AztecAddress` | Optional address of the deployer of the contract. | | `contract_class_id` | `Field` | Identifier of the contract class for this instance. | | `initialization_hash` | `Field` | Hash of the selector and arguments to the constructor. | -| `portal_contract_address` | `EthereumAddress` | Optional address of the L1 portal contract. | | `public_keys_hash` | `Field` | Optional hash of the struct of public keys used for encryption and nullifying by this contract.
| diff --git a/docs/docs/protocol-specs/l1-smart-contracts/index.md b/docs/docs/protocol-specs/l1-smart-contracts/index.md index c174b57d25d..c3b2c9e99f5 100644 --- a/docs/docs/protocol-specs/l1-smart-contracts/index.md +++ b/docs/docs/protocol-specs/l1-smart-contracts/index.md @@ -12,6 +12,7 @@ The purpose of the L1 contracts is simple: - Facilitate cross-chain communication such that L1 liquidity can be used on L2 - Act as a validating light node for L2 that every L1 node implicitly runs + ::: ## Overview @@ -230,26 +231,24 @@ This way, multiple rollup instances can use the same inbox/outbox contracts. :::info Why a single hash? Compute on L1 is expensive, but storage is extremely expensive! To reduce overhead, we trade storage for computation and only commit to the messages and then "open" these for consumption later. -However, since computation also bears significant we need to use a hash function that is relatively cheap on L1, while still being doable inside a snark. -For this purpose a modded SHA256 was chosen, modded here meaning that it fits the output value into a single field element using the modulo operator. +However, since computation also bears significant cost, we need to use a hash function that is relatively cheap on L1, while still being doable inside a snark. +For this purpose, a modified SHA256 was chosen, modified by fitting the output value into a single field element using the modulo operator. ::: -Some additional discussion/comments on the message structure can be found in [The Republic](https://forum.aztec.network/t/the-republic-a-flexible-optional-governance-proposal-with-self-governed-portals/609/2#supporting-pending-messages-5). +Some additional discussion/comments on the message structure can be found in the forum post [The Republic](https://forum.aztec.network/t/the-republic-a-flexible-optional-governance-proposal-with-self-governed-portals/609/2#supporting-pending-messages-5). -Since any data that is moving from one chain to the other at some point will live on L1, it will be PUBLIC. -While this is fine for L1 consumption (which is public in itself), we want to ensure that the L2 consumption can be private. +Since any data that is moving from one chain to the other at some point will live on L1, it will be public. While this is fine for L1 consumption (which is always public), we want to ensure that the L2 consumption can be private. To support this, we use a nullifier scheme similar to what we are doing for the other [notes](./../state/note-hash-tree.md). -As part of the nullifier computation we then use the `secret` which hashes to the `secretHash`, this ensures that only actors with knowledge of `secret` will be able to see when it is spent on L2. +As part of the nullifier computation we use a `secret` which hashes to a `secretHash`, which ensures that only actors with knowledge of the `secret` will be able to see when it is spent on L2. Any message that is consumed on one side MUST be moved to the other side. This is to ensure that the messages exist AND are only consumed once. -The L1 contracts can handle one side, but the circuits must handle the other. +The L1 contracts handle one side and the circuits must handle the other. :::info Is `secretHash` required? -We are using the `secretHash` to ensure that the user can spend the message privately with a generic nullifier computation.
-However, as the nullifier computation is almost entirely controlled by the app circuit (except the siloing, see [Nullifier Tree](./../state/nullifier-tree.md) ) applications could be made to simply use a different nullifier computation and have it become part of the content. -However, this reduces the developer burden and is quite easy to mess up. -For those reasons we have decided to use the `secretHash` as part of the message. +We are using the `secretHash` to ensure that the user can spend the message privately with a nullifier computation. +However, the nullifier computation is almost entirely controlled by the Aztec contract (the application circuit), except for the contract siloing (see [Nullifier Tree](./../state/nullifier-tree.md)), so contracts could instead compute a custom nullifier that includes the `secretHash` as part of the computation. +However, the chosen approach reduces the developer burden and reduces the likelihood of mistakes. ::: @@ -259,53 +258,43 @@ For those reasons we have decided to use the `secretHash` as part of the message ### Inbox When we say inbox, we are generally referring to the L1 contract that handles the L1 to L2 messages. The inbox takes messages from L1 contracts and inserts them into a series of message trees. -We build multiple "trees" instead of a single tree, since we are building one tree for every block and not one large with all the messages. +We build multiple "trees" instead of a single tree, since we are building one tree per block and not one large tree with all messages for all blocks. -The reasoning is fairly straight-forward; we need to split it into epochs such that a sequencer can build a proof based on a tree that is not going to update in the middle of the proof building. Such updates would allow DOS attacks on the sequencer, which is undesirable. +We need to split trees into epochs such that a sequencer can build a proof based on a tree that is not going to update in the middle of proof building. Having one tree that updates across blocks would allow DOS attacks on the sequencer, which is undesirable. -To support this, we can simply introduce a "lag" between when trees are built and when they must be included. -We can actually do this quite easily. -Say that whenever a new block is published, we start building a new tree. -Essentially meaning that at block $n$ we include tree $n$ which was created earlier (during block $n-1$). +In practice, we introduce a "lag" between when trees are built and when they must be included. Whenever a new block is published, we start building a new tree, essentially meaning that at block $n$ we include tree $n$ which was created earlier (during block $n-1$). -Example visualized below. -Here we have that tree $n$ is "fixed" when block $n$ needs to be published. -And that tree $n+1$ is being built upon until block $n$ is being published. +Below, tree $n$ is "fixed" when block $n$ needs to be published. Tree $n+1$ is being built upon until block $n$ is published. ![Feeding trees into the blocks](/img/protocol-specs/l1-smart-contracts/tree-order.png) -When the state transitioner is consuming a tree, it MUST insert the subtree into the "L2 outbox" ([message tree](./../state/index.md)). +When the state transitioner processes a tree, it MUST insert the subtree into the "L2 outbox" ([message tree](./../state/index.md) included in global state).
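As an illustrative sketch of the "lag" described above (the state variable and function names are assumptions, not the actual Inbox implementation), the inbox only needs to track which tree is being filled and which tree the next block must consume:

```solidity
// Toy model of the one-tree-per-block lag; not the real Inbox contract.
uint256 public toConsume = 0;  // tree that the next published block includes
uint256 public inProgress = 1; // tree currently accepting new messages

function onBlockPublished() internal {
    // Block n consumes tree n, which was frozen while block n-1 was being built.
    toConsume += 1;
    inProgress += 1;
}
```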
When a message is inserted into the inbox, the inbox **MUST** fill in the `sender`: - `L1Actor.actor`: The sender of the message (the caller), `msg.sender` - `L1Actor.chainId`: The chainId of the L1 chain sending the message, `block.chainId` -We MUST populate these values in the inbox, since we cannot rely on the user providing anything meaningful. +We MUST populate these values in the inbox, since we cannot rely on user input. From the `L1ToL2Msg` we compute a hash of the message. This hash is what is moved by the state transitioner to the L2 outbox. -Since message from L1 to L2 can be inserted independently of the L2 block, the message transfer (moving from inbox into outbox) is not synchronous as it is for L2 to L1 messages. +Since messages from L1 to L2 can be inserted independently of the L2 block, the message transfer (moving from L1 inbox into L2 outbox) is not synchronous as it is for L2 to L1 messages. This means that the message can be inserted into the inbox, but not yet moved to the outbox. -The message will then be moved to the outbox when the state transitioner is consuming the message as part of a block. -Since the sequencers are required to move the entire subtree at once, you can be sure that the message will be moved to the outbox at some point. - -As mentioned earlier, this is done to ensure that the messages are not used to DOS the state transitioner. +The message will be moved to the outbox when the state transitioner processes the message as part of a block. +Since sequencers are required to move the entire subtree at once, you can be sure that the message will be moved to the outbox. As mentioned earlier, segmenting updates is done to ensure that the messages are not used to DOS the state transitioner. -Since we will be building the tree on L1, we need to use a gas-friendly hash-function such as SHA256. -However, as we need to allow users to prove inclusion in this tree, we cannot just insert the SHA256 tree into the rollup state, it requires too many constraints to be used by most small users. -Therefore, we need to "convert" the tree into a tree using a more snark-friendly hash. +The message tree is built on L1, so we need to use a gas-friendly hash function such as SHA256. +However, we need to allow users to prove inclusion in this tree, so we cannot just insert the SHA256 tree into the rollup state, since it is expensive to process in a zk circuit. +Therefore, we need to "convert" the SHA256 tree into a tree that uses a more snark-friendly hash. This part is done in the [tree parity circuits](./../rollup-circuits/tree-parity.md). -Furthermore, to build the tree on L1, we need to put some storage on L1 such that the insertions don't need to provide a lot of merkle-related data which could be cumbersome to do and prone to race-conditions. -For example two insertions based on inclusion paths that are created at the same time will invalidate each other. -As storage costs an arm and a leg on L1, we need to be careful with how we store this. +Furthermore, to build the tree on L1, we can optimize storage on L1 such that the insertions don't require a lot of merkle-tree-related data, which could be cumbersome and prone to race conditions (e.g., two insertions based on inclusion paths that are created at the same time will invalidate each other).
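To make that race concrete, here is a toy sketch (not the actual design) of path-based insertion, where any interleaved insert changes the root and invalidates every pending sibling path:

```solidity
// Toy path-based insertion; two concurrent callers compute paths against the
// same root, so whichever transaction lands second reverts with a stale path.
bytes32 public root;

function insert(bytes32 _leaf, bytes32 _oldRoot, bytes32[] calldata _siblingPath) external {
    require(root == _oldRoot, "stale inclusion path");
    root = computeRootFromPath(_leaf, _siblingPath); // hypothetical helper
}
```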
-This is a special kind of append-only merkle tree that allows us to store very few elements in storage, but just enough for us to be able to extend it, and compute the root of the tree. -Consult [Frontier Merkle Tree](#frontier-merkle-tree]) for more information on this. +The solution is to use a "frontier" merkle tree to store the messages. +This is a special kind of append-only merkle tree that allows us to store very few elements in storage, while still being able to extend it and compute the root of the tree. See the [Frontier Merkle Tree](#frontier-merkle-tree]) for more information on this. -Assuming that we have these trees, we can build an `inbox` utilizing them as follows. +Assuming that we have these trees, we can build an `Inbox` as follows. When a new block is published, we start building a new tree. Notice however, that if we have entirely filled the current tree, we can start building a new one immediately, and the blocks can then "catch up". @@ -367,19 +356,18 @@ class Inbox: #### L2 Inbox -While the L2 inbox is not a real contract, it is a logical contract that apply mutations to the data similar to the L1 inbox to ensure that the sender cannot fake his position. -This logic is handled by the kernel and rollup circuits. +While the L2 inbox is not a contract, it is a logical concept that apply mutations to the data similar to the L1 inbox to ensure that the sender cannot fake his position. This logic is handled by the kernel and rollup circuits. Just like the L1 variant, we must populate the `sender`: - `L2Actor.actor`: The sender of the message (the caller) - `L2Actor.version`: The version of the L2 chain sending the message -In practice, this is done in the kernel circuit of the L2, and the message hashes are then aggregated into a tree as outlined in the [Rollup Circuits section](./../rollup-circuits/index.md) before it is inserted into the L1 outbox which we will address now. +In practice, this is done in the kernel circuit of the L2, and the message hashes are then aggregated into a tree as outlined in the [Rollup Circuits section](./../rollup-circuits/index.md) before it is inserted into the L1 outbox. ### Outbox -The outboxes are the location where a user can consume messages from. +The outboxes are the location where a user can consume messages from on the destination chain. An outbox can only contain elements that have previously been removed from the paired inbox. 1 sha256 hash -> 31 bytes -> 1 fields | Beware when populating bytes that we fill (prepend) to 32! // 1 encrypted logs hash --> 1 sha256 hash -> 31 bytes -> 1 fields | Beware when populating bytes that we fill (prepend) to 32! // 1 unencrypted logs hash --> 1 sha256 hash -> 31 bytes -> 1 fields | Beware when populating bytes that we fill (prepend) to 32! 
let mut txs_effects_hash_input = [0; TX_EFFECTS_HASH_INPUT_FIELDS]; @@ -150,6 +151,7 @@ pub fn compute_tx_effects_hash( let new_nullifiers = combined.new_nullifiers; let new_l2_to_l1_msgs = combined.new_l2_to_l1_msgs; let public_data_update_requests = combined.public_data_update_requests; + let note_encrypted_logs_hash = combined.note_encrypted_logs_hash; let encrypted_logs_hash = combined.encrypted_logs_hash; let unencrypted_logs_hash = combined.unencrypted_logs_hash; @@ -185,6 +187,10 @@ pub fn compute_tx_effects_hash( } offset += MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * 2; + txs_effects_hash_input[offset] = note_encrypted_logs_hash; + + offset += NUM_ENCRYPTED_LOGS_HASHES_PER_TX; + txs_effects_hash_input[offset] = encrypted_logs_hash; offset += NUM_ENCRYPTED_LOGS_HASHES_PER_TX; @@ -217,6 +223,7 @@ fn consistent_TX_EFFECTS_HASH_INPUT_FIELDS() { + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * 2 + MAX_NEW_L2_TO_L1_MSGS_PER_TX + NUM_ENCRYPTED_LOGS_HASHES_PER_TX + + NUM_ENCRYPTED_LOGS_HASHES_PER_TX + NUM_UNENCRYPTED_LOGS_HASHES_PER_TX; assert(TX_EFFECTS_HASH_INPUT_FIELDS == expected_size, "tx effects hash input size is incorrect"); } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr index 878bb933b1f..8c12f9caeb1 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr @@ -13,6 +13,7 @@ mod combined_constant_data; mod side_effect; mod read_request; +mod log_hash; mod note_hash; mod nullifier; mod nullifier_key_validation_request; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/combined_accumulated_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/combined_accumulated_data.nr index 452cac06dc0..1c224a9d35f 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/combined_accumulated_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/combined_accumulated_data.nr @@ -1,5 +1,5 @@ use crate::{ - hash::compute_tx_logs_hash, + hash::{compute_tx_logs_hash, compute_tx_note_logs_hash}, abis::{ accumulated_data::public_accumulated_data::PublicAccumulatedData, note_hash::NoteHash, nullifier::Nullifier, public_data_update_request::PublicDataUpdateRequest, side_effect::SideEffect, @@ -17,6 +17,7 @@ struct CombinedAccumulatedData { new_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_TX], new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_TX], + note_encrypted_logs_hash: Field, encrypted_logs_hash: Field, unencrypted_logs_hash: Field, @@ -33,6 +34,12 @@ struct CombinedAccumulatedData { impl CombinedAccumulatedData { pub fn combine(non_revertible: PublicAccumulatedData, revertible: PublicAccumulatedData) -> Self { // TODO(Miranda): Hash here or elsewhere? 
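+        // Merge the note log hashes from the non-revertible and revertible phases and hash
+        // them into a single tx-level value, mirroring the encrypted/unencrypted logs below.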
+ let note_encrypted_logs_hash = compute_tx_note_logs_hash( + array_merge( + non_revertible.note_encrypted_logs_hashes, + revertible.note_encrypted_logs_hashes + ) + ); let encrypted_logs_hash = compute_tx_logs_hash( array_merge( non_revertible.encrypted_logs_hashes, @@ -52,6 +59,7 @@ impl CombinedAccumulatedData { non_revertible.new_l2_to_l1_msgs, revertible.new_l2_to_l1_msgs ), + note_encrypted_logs_hash, encrypted_logs_hash, unencrypted_logs_hash, encrypted_log_preimages_length: non_revertible.encrypted_log_preimages_length @@ -73,6 +81,7 @@ impl Empty for CombinedAccumulatedData { new_note_hashes: [0; MAX_NEW_NOTE_HASHES_PER_TX], new_nullifiers: [0; MAX_NEW_NULLIFIERS_PER_TX], new_l2_to_l1_msgs: [0; MAX_NEW_L2_TO_L1_MSGS_PER_TX], + note_encrypted_logs_hash: 0, encrypted_logs_hash: 0, unencrypted_logs_hash: 0, encrypted_log_preimages_length: 0, @@ -90,6 +99,7 @@ impl Serialize for CombinedAccumulatedData { fields.extend_from_array(self.new_note_hashes); fields.extend_from_array(self.new_nullifiers); fields.extend_from_array(self.new_l2_to_l1_msgs); + fields.push(self.note_encrypted_logs_hash); fields.push(self.encrypted_logs_hash); fields.push(self.unencrypted_logs_hash); fields.push(self.encrypted_log_preimages_length); @@ -115,6 +125,7 @@ impl Deserialize for CombinedAccumulatedData { new_note_hashes: reader.read_array([0; MAX_NEW_NOTE_HASHES_PER_TX]), new_nullifiers: reader.read_array([0; MAX_NEW_NULLIFIERS_PER_TX]), new_l2_to_l1_msgs: reader.read_array([0; MAX_NEW_L2_TO_L1_MSGS_PER_TX]), + note_encrypted_logs_hash: reader.read(), encrypted_logs_hash: reader.read(), unencrypted_logs_hash: reader.read(), encrypted_log_preimages_length: reader.read(), @@ -132,6 +143,7 @@ impl Eq for CombinedAccumulatedData { (self.new_note_hashes == other.new_note_hashes) & (self.new_nullifiers == other.new_nullifiers) & (self.new_l2_to_l1_msgs == other.new_l2_to_l1_msgs) & + (self.note_encrypted_logs_hash == other.note_encrypted_logs_hash) & (self.encrypted_logs_hash == other.encrypted_logs_hash) & (self.unencrypted_logs_hash == other.unencrypted_logs_hash) & (self.encrypted_log_preimages_length == other.encrypted_log_preimages_length) & diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data.nr index 4984175614c..8d225670ec8 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data.nr @@ -1,7 +1,7 @@ use crate::{ abis::{ call_request::CallRequest, gas::Gas, note_hash::ScopedNoteHash, nullifier::ScopedNullifier, - side_effect::SideEffect + log_hash::{LogHash, NoteLogHash} }, traits::{Serialize, Deserialize, Eq, Empty}, messaging::l2_to_l1_message::ScopedL2ToL1Message, utils::reader::Reader @@ -9,7 +9,7 @@ use crate::{ use crate::constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX, MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, MAX_NEW_L2_TO_L1_MSGS_PER_TX, MAX_ENCRYPTED_LOGS_PER_TX, - MAX_UNENCRYPTED_LOGS_PER_TX, PRIVATE_ACCUMULATED_DATA_LENGTH + MAX_UNENCRYPTED_LOGS_PER_TX, MAX_NOTE_ENCRYPTED_LOGS_PER_TX, PRIVATE_ACCUMULATED_DATA_LENGTH }; struct PrivateAccumulatedData { @@ -17,8 +17,9 @@ struct PrivateAccumulatedData { new_nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], new_l2_to_l1_msgs: [ScopedL2ToL1Message; 
MAX_NEW_L2_TO_L1_MSGS_PER_TX], - encrypted_logs_hashes: [SideEffect; MAX_ENCRYPTED_LOGS_PER_TX], - unencrypted_logs_hashes: [SideEffect; MAX_UNENCRYPTED_LOGS_PER_TX], + note_encrypted_logs_hashes: [NoteLogHash; MAX_NOTE_ENCRYPTED_LOGS_PER_TX], + encrypted_logs_hashes: [LogHash; MAX_ENCRYPTED_LOGS_PER_TX], + unencrypted_logs_hashes: [LogHash; MAX_UNENCRYPTED_LOGS_PER_TX], // Here so that the gas cost of this request can be measured by circuits, without actually needing to feed in the // variable-length data. @@ -45,6 +46,10 @@ impl Serialize for PrivateAccumulatedData { fields.extend_from_array(self.new_l2_to_l1_msgs[i].serialize()); } + for i in 0..MAX_NOTE_ENCRYPTED_LOGS_PER_TX { + fields.extend_from_array(self.note_encrypted_logs_hashes[i].serialize()); + } + for i in 0..MAX_ENCRYPTED_LOGS_PER_TX { fields.extend_from_array(self.encrypted_logs_hashes[i].serialize()); } @@ -78,8 +83,9 @@ impl Deserialize for PrivateAccumulatedData { new_note_hashes: reader.read_struct_array(ScopedNoteHash::deserialize, [ScopedNoteHash::empty(); MAX_NEW_NOTE_HASHES_PER_TX]), new_nullifiers: reader.read_struct_array(ScopedNullifier::deserialize, [ScopedNullifier::empty(); MAX_NEW_NULLIFIERS_PER_TX]), new_l2_to_l1_msgs: reader.read_struct_array(ScopedL2ToL1Message::deserialize, [ScopedL2ToL1Message::empty(); MAX_NEW_L2_TO_L1_MSGS_PER_TX]), - encrypted_logs_hashes: reader.read_struct_array(SideEffect::deserialize, [SideEffect::empty(); MAX_ENCRYPTED_LOGS_PER_TX]), - unencrypted_logs_hashes: reader.read_struct_array(SideEffect::deserialize, [SideEffect::empty(); MAX_UNENCRYPTED_LOGS_PER_TX]), + note_encrypted_logs_hashes: reader.read_struct_array(NoteLogHash::deserialize, [NoteLogHash::empty(); MAX_NOTE_ENCRYPTED_LOGS_PER_TX]), + encrypted_logs_hashes: reader.read_struct_array(LogHash::deserialize, [LogHash::empty(); MAX_ENCRYPTED_LOGS_PER_TX]), + unencrypted_logs_hashes: reader.read_struct_array(LogHash::deserialize, [LogHash::empty(); MAX_UNENCRYPTED_LOGS_PER_TX]), encrypted_log_preimages_length: reader.read(), unencrypted_log_preimages_length: reader.read(), private_call_stack: reader.read_struct_array(CallRequest::deserialize, [CallRequest::empty(); MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX]), @@ -95,6 +101,7 @@ impl Eq for PrivateAccumulatedData { (self.new_note_hashes == other.new_note_hashes) & (self.new_nullifiers == other.new_nullifiers) & (self.new_l2_to_l1_msgs == other.new_l2_to_l1_msgs) & + (self.note_encrypted_logs_hashes == other.note_encrypted_logs_hashes) & (self.encrypted_logs_hashes == other.encrypted_logs_hashes) & (self.unencrypted_logs_hashes == other.unencrypted_logs_hashes) & (self.encrypted_log_preimages_length == other.encrypted_log_preimages_length) & @@ -110,8 +117,9 @@ impl Empty for PrivateAccumulatedData { new_note_hashes: [ScopedNoteHash::empty(); MAX_NEW_NOTE_HASHES_PER_TX], new_nullifiers: [ScopedNullifier::empty(); MAX_NEW_NULLIFIERS_PER_TX], new_l2_to_l1_msgs: [ScopedL2ToL1Message::empty(); MAX_NEW_L2_TO_L1_MSGS_PER_TX], - encrypted_logs_hashes: [SideEffect::empty(); MAX_ENCRYPTED_LOGS_PER_TX], - unencrypted_logs_hashes: [SideEffect::empty(); MAX_UNENCRYPTED_LOGS_PER_TX], + note_encrypted_logs_hashes: [NoteLogHash::empty(); MAX_NOTE_ENCRYPTED_LOGS_PER_TX], + encrypted_logs_hashes: [LogHash::empty(); MAX_ENCRYPTED_LOGS_PER_TX], + unencrypted_logs_hashes: [LogHash::empty(); MAX_UNENCRYPTED_LOGS_PER_TX], encrypted_log_preimages_length: 0, unencrypted_log_preimages_length: 0, private_call_stack: [CallRequest::empty(); MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX], diff --git 
a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data_builder.nr index 08597103279..ecf583f00e9 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data_builder.nr @@ -1,5 +1,5 @@ use crate::{ - hash::compute_tx_logs_hash, + hash::{compute_tx_logs_hash, compute_tx_note_logs_hash}, abis::{ gas::Gas, accumulated_data::{ @@ -8,13 +8,13 @@ use crate::{ public_accumulated_data_builder::PublicAccumulatedDataBuilder }, call_request::CallRequest, note_hash::{NoteHash, ScopedNoteHash}, nullifier::ScopedNullifier, - public_data_update_request::PublicDataUpdateRequest, side_effect::SideEffect + public_data_update_request::PublicDataUpdateRequest, log_hash::{LogHash, NoteLogHash} }, constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX, MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, MAX_NEW_L2_TO_L1_MSGS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX, - DA_BYTES_PER_FIELD, DA_GAS_PER_BYTE + MAX_NOTE_ENCRYPTED_LOGS_PER_TX, DA_BYTES_PER_FIELD, DA_GAS_PER_BYTE }, messaging::l2_to_l1_message::ScopedL2ToL1Message, traits::{Empty, is_empty} }; @@ -28,8 +28,9 @@ struct PrivateAccumulatedDataBuilder { new_nullifiers: BoundedVec, new_l2_to_l1_msgs: BoundedVec, - encrypted_logs_hashes: BoundedVec, - unencrypted_logs_hashes: BoundedVec, + note_encrypted_logs_hashes: BoundedVec, + encrypted_logs_hashes: BoundedVec, + unencrypted_logs_hashes: BoundedVec, // Here so that the gas cost of this request can be measured by circuits, without actually needing to feed in the // variable-length data. @@ -47,6 +48,7 @@ impl PrivateAccumulatedDataBuilder { new_note_hashes: self.new_note_hashes.storage, new_nullifiers: self.new_nullifiers.storage, new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage, + note_encrypted_logs_hashes: self.note_encrypted_logs_hashes.storage, encrypted_logs_hashes: self.encrypted_logs_hashes.storage, unencrypted_logs_hashes: self.unencrypted_logs_hashes.storage, encrypted_log_preimages_length: self.encrypted_log_preimages_length, @@ -58,6 +60,7 @@ impl PrivateAccumulatedDataBuilder { pub fn to_combined(self, teardown_gas: Gas) -> CombinedAccumulatedData { // TODO(Miranda): Hash here or elsewhere? 
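+        // Collapse the accumulated per-note log hashes into one tx-level hash, alongside
+        // the existing encrypted and unencrypted log hashes.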
+ let note_encrypted_logs_hash = compute_tx_note_logs_hash(self.note_encrypted_logs_hashes.storage); let encrypted_logs_hash = compute_tx_logs_hash(self.encrypted_logs_hashes.storage); let unencrypted_logs_hash = compute_tx_logs_hash(self.unencrypted_logs_hashes.storage); let gas_used = self.to_metered_gas_used() + Gas::tx_overhead() + teardown_gas; @@ -66,6 +69,7 @@ impl PrivateAccumulatedDataBuilder { new_note_hashes: self.new_note_hashes.storage.map(|n: ScopedNoteHash| n.note_hash.value), new_nullifiers: self.new_nullifiers.storage.map(|n: ScopedNullifier| n.nullifier.value), new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage.map(|m: ScopedL2ToL1Message| m.message.content), + note_encrypted_logs_hash, encrypted_logs_hash, unencrypted_logs_hash, encrypted_log_preimages_length: self.encrypted_log_preimages_length, @@ -172,12 +176,30 @@ impl PrivateAccumulatedDataBuilder { } } + for i in 0..MAX_NOTE_ENCRYPTED_LOGS_PER_TX { + let note_encrypted_logs_hash_private = self.note_encrypted_logs_hashes.storage[i]; + let note_encrypted_logs_hash = note_encrypted_logs_hash_private.expose_to_public(); + if note_encrypted_logs_hash_private.counter < min_revertible_side_effect_counter { + non_revertible_builder.note_encrypted_logs_hashes.push(note_encrypted_logs_hash); + non_revertible_builder.encrypted_log_preimages_length += note_encrypted_logs_hash.length; + non_revertible_da_gas_used += note_encrypted_logs_hash.length as u32 * DA_GAS_PER_BYTE; + } else { + revertible_builder.note_encrypted_logs_hashes.push(note_encrypted_logs_hash); + revertible_builder.encrypted_log_preimages_length += note_encrypted_logs_hash.length; + revertible_da_gas_used += note_encrypted_logs_hash.length as u32 * DA_GAS_PER_BYTE; + } + } + for i in 0..MAX_ENCRYPTED_LOGS_PER_TX { let encrypted_logs_hash = self.encrypted_logs_hashes.storage[i]; if encrypted_logs_hash.counter < min_revertible_side_effect_counter { non_revertible_builder.encrypted_logs_hashes.push(encrypted_logs_hash); + non_revertible_builder.encrypted_log_preimages_length += encrypted_logs_hash.length; + non_revertible_da_gas_used += encrypted_logs_hash.length as u32 * DA_GAS_PER_BYTE; } else { revertible_builder.encrypted_logs_hashes.push(encrypted_logs_hash); + revertible_builder.encrypted_log_preimages_length += encrypted_logs_hash.length; + revertible_da_gas_used += encrypted_logs_hash.length as u32 * DA_GAS_PER_BYTE; } } @@ -185,16 +207,25 @@ impl PrivateAccumulatedDataBuilder { let unencrypted_logs_hash = self.unencrypted_logs_hashes.storage[i]; if unencrypted_logs_hash.counter < min_revertible_side_effect_counter { non_revertible_builder.unencrypted_logs_hashes.push(unencrypted_logs_hash); + non_revertible_builder.unencrypted_log_preimages_length += unencrypted_logs_hash.length; + non_revertible_da_gas_used += unencrypted_logs_hash.length as u32 * DA_GAS_PER_BYTE; } else { revertible_builder.unencrypted_logs_hashes.push(unencrypted_logs_hash); + revertible_builder.unencrypted_log_preimages_length += unencrypted_logs_hash.length; + revertible_da_gas_used += unencrypted_logs_hash.length as u32 * DA_GAS_PER_BYTE; } } - // TODO(1641) & TODO(4712): Once we track logs with more info, including individual lens, split here - revertible_builder.encrypted_log_preimages_length = self.encrypted_log_preimages_length; - revertible_builder.unencrypted_log_preimages_length = self.unencrypted_log_preimages_length; - - revertible_da_gas_used += DA_GAS_PER_BYTE * (self.encrypted_log_preimages_length as u32 + self.unencrypted_log_preimages_length as u32); + assert( + 
revertible_builder.encrypted_log_preimages_length + + non_revertible_builder.encrypted_log_preimages_length + == self.encrypted_log_preimages_length, "encrypted logs length mismatch" + ); + assert( + revertible_builder.unencrypted_log_preimages_length + + non_revertible_builder.unencrypted_log_preimages_length + == self.unencrypted_log_preimages_length, "unencrypted logs length mismatch" + ); revertible_builder.gas_used = Gas::new(revertible_da_gas_used, revertible_l2_gas_used); non_revertible_builder.gas_used = Gas::tx_overhead() + Gas::new(non_revertible_da_gas_used, non_revertible_l2_gas_used); @@ -208,7 +239,7 @@ mod tests { accumulated_data::private_accumulated_data_builder::PrivateAccumulatedDataBuilder, gas::Gas, call_request::CallRequest, caller_context::CallerContext, note_hash::NoteHash, nullifier::Nullifier, public_data_update_request::PublicDataUpdateRequest, - side_effect::SideEffect + log_hash::{LogHash, NoteLogHash} }, address::{AztecAddress, EthAddress}, messaging::l2_to_l1_message::L2ToL1Message, utils::arrays::array_eq, constants::{DA_BYTES_PER_FIELD, DA_GAS_PER_BYTE} @@ -219,22 +250,24 @@ mod tests { let mut builder = PrivateAccumulatedDataBuilder::empty(); let contract_address = AztecAddress::from_field(8989); - let min_revertible_side_effect_counter = 7; + let min_revertible_side_effect_counter = 13; - // Non revertible: counter < 7 + // Non revertible: counter < 13 let non_revertible_note_hashes = [ NoteHash { value: 1, counter: 1 }.scope(20, contract_address), - NoteHash { value: 2, counter: 3 }.scope(5, contract_address) + NoteHash { value: 2, counter: 4 }.scope(5, contract_address) ]; + let non_revertible_note_logs = [NoteLogHash { value: 11, counter: 2, length: 2, note_hash_counter: 1 }]; + let non_revertible_nullifiers = [ - Nullifier { value: 10, note_hash: 1, counter: 2 }.scope(contract_address), - Nullifier { value: 20, note_hash: 2, counter: 4 }.scope(contract_address) + Nullifier { value: 10, note_hash: 1, counter: 3 }.scope(contract_address), + Nullifier { value: 20, note_hash: 2, counter: 5 }.scope(contract_address) ]; let non_revertible_l2_to_l1_messages = [ - L2ToL1Message { recipient: EthAddress::from_field(3030), content: 333333, counter: 5 }.scope(AztecAddress::from_field(9900)) + L2ToL1Message { recipient: EthAddress::from_field(3030), content: 333333, counter: 6 }.scope(AztecAddress::from_field(9900)) ]; let non_revertible_public_stack = [ @@ -242,32 +275,44 @@ mod tests { hash: 1, caller_contract_address: AztecAddress::from_field(1), caller_context: CallerContext::empty(), - start_side_effect_counter: 5, + start_side_effect_counter: 6, end_side_effect_counter: 0 }, CallRequest { hash: 2, caller_contract_address: AztecAddress::from_field(1), caller_context: CallerContext::empty(), - start_side_effect_counter: 6, + start_side_effect_counter: 7, end_side_effect_counter: 0 } ]; - // Revertible: counter >= 7 + let non_revertible_enc_log_hashes = [ + LogHash { value: 11, counter: 9, length: 2 }, + LogHash { value: 22, counter: 10, length: 2 } + ]; + + let non_revertible_unenc_log_hashes = [ + LogHash { value: 33, counter: 11, length: 5 }, + LogHash { value: 44, counter: 12, length: 5 } + ]; + + // Revertible: counter >= 13 let revertible_note_hashes = [ - NoteHash { value: 3, counter: 7 }.scope(15, contract_address), - NoteHash { value: 4, counter: 10 }.scope(0, contract_address) + NoteHash { value: 3, counter: 13 }.scope(15, contract_address), + NoteHash { value: 4, counter: 16 }.scope(0, contract_address) ]; + let revertible_note_logs = [NoteLogHash { 
value: 33, counter: 14, length: 2, note_hash_counter: 13 }]; + let revertible_nullifiers = [ - Nullifier { value: 30, note_hash: 3, counter: 8 }.scope(contract_address), - Nullifier { value: 40, note_hash: 4, counter: 11 }.scope(contract_address) + Nullifier { value: 30, note_hash: 3, counter: 15 }.scope(contract_address), + Nullifier { value: 40, note_hash: 4, counter: 18 }.scope(contract_address) ]; let revertible_l2_to_l1_messages = [ - L2ToL1Message { recipient: EthAddress::from_field(3030), content: 444444, counter: 13 }.scope(AztecAddress::from_field(7788)) + L2ToL1Message { recipient: EthAddress::from_field(3030), content: 444444, counter: 19 }.scope(AztecAddress::from_field(7788)) ]; let revertible_public_call_stack = [ @@ -275,11 +320,21 @@ mod tests { hash: 3, caller_contract_address: AztecAddress::from_field(3), caller_context: CallerContext::empty(), - start_side_effect_counter: 9, + start_side_effect_counter: 17, end_side_effect_counter: 0 } ]; + let revertible_enc_log_hashes = [ + LogHash { value: 55, counter: 20, length: 2 }, + LogHash { value: 66, counter: 21, length: 2 } + ]; + + let revertible_unenc_log_hashes = [ + LogHash { value: 77, counter: 22, length: 5 }, + LogHash { value: 88, counter: 23, length: 5 } + ]; + builder.new_note_hashes.extend_from_array(non_revertible_note_hashes); builder.new_note_hashes.extend_from_array(revertible_note_hashes); @@ -292,6 +347,20 @@ mod tests { builder.public_call_stack.extend_from_array(non_revertible_public_stack); builder.public_call_stack.extend_from_array(revertible_public_call_stack); + builder.note_encrypted_logs_hashes.extend_from_array(non_revertible_note_logs); + builder.note_encrypted_logs_hashes.extend_from_array(revertible_note_logs); + + builder.encrypted_logs_hashes.extend_from_array(non_revertible_enc_log_hashes); + builder.encrypted_logs_hashes.extend_from_array(revertible_enc_log_hashes); + builder.encrypted_log_preimages_length = 12; + + builder.unencrypted_logs_hashes.extend_from_array(non_revertible_unenc_log_hashes); + builder.unencrypted_logs_hashes.extend_from_array(revertible_unenc_log_hashes); + builder.unencrypted_log_preimages_length = 20; + + let public_non_revertible_note_logs = non_revertible_note_logs.map(|n: NoteLogHash| n.expose_to_public()); + let public_revertible_note_logs = revertible_note_logs.map(|n: NoteLogHash| n.expose_to_public()); + let (non_revertible, revertible) = builder.split_to_public(min_revertible_side_effect_counter, Gas::new(42, 17)); assert( @@ -314,6 +383,24 @@ mod tests { ); assert(array_eq(non_revertible.new_l2_to_l1_msgs, [333333])); assert(array_eq(non_revertible.public_call_stack, non_revertible_public_stack)); + assert( + array_eq( + non_revertible.note_encrypted_logs_hashes, + public_non_revertible_note_logs + ) + ); + assert( + array_eq( + non_revertible.encrypted_logs_hashes, + non_revertible_enc_log_hashes + ) + ); + assert( + array_eq( + non_revertible.unencrypted_logs_hashes, + non_revertible_unenc_log_hashes + ) + ); assert( array_eq( @@ -335,14 +422,40 @@ mod tests { ); assert(array_eq(revertible.new_l2_to_l1_msgs, [444444])); assert(array_eq(revertible.public_call_stack, revertible_public_call_stack)); + assert( + array_eq( + revertible.note_encrypted_logs_hashes, + public_revertible_note_logs + ) + ); + assert(array_eq(revertible.encrypted_logs_hashes, revertible_enc_log_hashes)); + assert( + array_eq( + revertible.unencrypted_logs_hashes, + revertible_unenc_log_hashes + ) + ); assert_eq( - revertible.gas_used, Gas::new(4 * DA_BYTES_PER_FIELD * DA_GAS_PER_BYTE, 
0) + Gas::new(42, 17) + revertible.gas_used, Gas::new( + (4 * DA_BYTES_PER_FIELD + + revertible.encrypted_log_preimages_length as u32 + + revertible.unencrypted_log_preimages_length as u32) + * DA_GAS_PER_BYTE, + 0 + ) + + Gas::new(42, 17) ); - print(non_revertible.gas_used); assert_eq( - non_revertible.gas_used, Gas::new(4 * DA_BYTES_PER_FIELD * DA_GAS_PER_BYTE, 0) + Gas::tx_overhead() + non_revertible.gas_used, Gas::new( + (4 * DA_BYTES_PER_FIELD + + non_revertible.encrypted_log_preimages_length as u32 + + non_revertible.unencrypted_log_preimages_length as u32) + * DA_GAS_PER_BYTE, + 0 + ) + + Gas::tx_overhead() ); } } @@ -353,6 +466,7 @@ impl Empty for PrivateAccumulatedDataBuilder { new_note_hashes: BoundedVec::new(), new_nullifiers: BoundedVec::new(), new_l2_to_l1_msgs: BoundedVec::new(), + note_encrypted_logs_hashes: BoundedVec::new(), encrypted_logs_hashes: BoundedVec::new(), unencrypted_logs_hashes: BoundedVec::new(), encrypted_log_preimages_length: 0, diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/public_accumulated_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/public_accumulated_data.nr index 518a24b1675..06f23069398 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/public_accumulated_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/public_accumulated_data.nr @@ -1,12 +1,12 @@ use crate::{ abis::{ call_request::CallRequest, public_data_update_request::PublicDataUpdateRequest, gas::Gas, - note_hash::NoteHash, nullifier::Nullifier, side_effect::SideEffect + note_hash::NoteHash, nullifier::Nullifier, log_hash::{LogHash, NoteLogHash} }, constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, MAX_NEW_L2_TO_L1_MSGS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, MAX_ENCRYPTED_LOGS_PER_TX, - MAX_UNENCRYPTED_LOGS_PER_TX, PUBLIC_ACCUMULATED_DATA_LENGTH + MAX_UNENCRYPTED_LOGS_PER_TX, MAX_NOTE_ENCRYPTED_LOGS_PER_TX, PUBLIC_ACCUMULATED_DATA_LENGTH }, traits::{Empty, Serialize, Deserialize}, utils::reader::Reader }; @@ -16,8 +16,9 @@ struct PublicAccumulatedData { new_nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_TX], - encrypted_logs_hashes: [SideEffect; MAX_ENCRYPTED_LOGS_PER_TX], - unencrypted_logs_hashes: [SideEffect; MAX_UNENCRYPTED_LOGS_PER_TX], + note_encrypted_logs_hashes: [NoteLogHash; MAX_NOTE_ENCRYPTED_LOGS_PER_TX], + encrypted_logs_hashes: [LogHash; MAX_ENCRYPTED_LOGS_PER_TX], + unencrypted_logs_hashes: [LogHash; MAX_UNENCRYPTED_LOGS_PER_TX], // Here so that the gas cost of this request can be measured by circuits, without actually needing to feed in the // variable-length data. 
@@ -37,8 +38,9 @@ impl Empty for PublicAccumulatedData { new_note_hashes: [NoteHash::empty(); MAX_NEW_NOTE_HASHES_PER_TX], new_nullifiers: [Nullifier::empty(); MAX_NEW_NULLIFIERS_PER_TX], new_l2_to_l1_msgs: [0; MAX_NEW_L2_TO_L1_MSGS_PER_TX], - encrypted_logs_hashes: [SideEffect::empty(); MAX_ENCRYPTED_LOGS_PER_TX], - unencrypted_logs_hashes: [SideEffect::empty(); MAX_UNENCRYPTED_LOGS_PER_TX], + note_encrypted_logs_hashes: [NoteLogHash::empty(); MAX_NOTE_ENCRYPTED_LOGS_PER_TX], + encrypted_logs_hashes: [LogHash::empty(); MAX_ENCRYPTED_LOGS_PER_TX], + unencrypted_logs_hashes: [LogHash::empty(); MAX_UNENCRYPTED_LOGS_PER_TX], encrypted_log_preimages_length: 0, unencrypted_log_preimages_length: 0, public_data_update_requests: [PublicDataUpdateRequest::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], @@ -62,6 +64,10 @@ impl Serialize for PublicAccumulatedData { fields.extend_from_array(self.new_l2_to_l1_msgs); + for i in 0..MAX_NOTE_ENCRYPTED_LOGS_PER_TX { + fields.extend_from_array(self.note_encrypted_logs_hashes[i].serialize()); + } + for i in 0..MAX_ENCRYPTED_LOGS_PER_TX { fields.extend_from_array(self.encrypted_logs_hashes[i].serialize()); } @@ -97,8 +103,9 @@ impl Deserialize for PublicAccumulatedData { new_note_hashes: reader.read_struct_array(NoteHash::deserialize, [NoteHash::empty(); MAX_NEW_NOTE_HASHES_PER_TX]), new_nullifiers: reader.read_struct_array(Nullifier::deserialize, [Nullifier::empty(); MAX_NEW_NULLIFIERS_PER_TX]), new_l2_to_l1_msgs: reader.read_array([0; MAX_NEW_L2_TO_L1_MSGS_PER_TX]), - encrypted_logs_hashes: reader.read_struct_array(SideEffect::deserialize, [SideEffect::empty(); MAX_ENCRYPTED_LOGS_PER_TX]), - unencrypted_logs_hashes: reader.read_struct_array(SideEffect::deserialize, [SideEffect::empty(); MAX_UNENCRYPTED_LOGS_PER_TX]), + note_encrypted_logs_hashes: reader.read_struct_array(NoteLogHash::deserialize, [NoteLogHash::empty(); MAX_NOTE_ENCRYPTED_LOGS_PER_TX]), + encrypted_logs_hashes: reader.read_struct_array(LogHash::deserialize, [LogHash::empty(); MAX_ENCRYPTED_LOGS_PER_TX]), + unencrypted_logs_hashes: reader.read_struct_array(LogHash::deserialize, [LogHash::empty(); MAX_UNENCRYPTED_LOGS_PER_TX]), encrypted_log_preimages_length: reader.read(), unencrypted_log_preimages_length: reader.read(), public_data_update_requests: reader.read_struct_array(PublicDataUpdateRequest::deserialize, [PublicDataUpdateRequest::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX]), @@ -115,6 +122,7 @@ impl Eq for PublicAccumulatedData { (self.new_note_hashes == other.new_note_hashes) & (self.new_nullifiers == other.new_nullifiers) & (self.new_l2_to_l1_msgs == other.new_l2_to_l1_msgs) & + (self.note_encrypted_logs_hashes == other.note_encrypted_logs_hashes) & (self.encrypted_logs_hashes == other.encrypted_logs_hashes) & (self.unencrypted_logs_hashes == other.unencrypted_logs_hashes) & (self.encrypted_log_preimages_length == other.encrypted_log_preimages_length) & diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/public_accumulated_data_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/public_accumulated_data_builder.nr index dcf35fcf50f..104458d673c 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/public_accumulated_data_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/public_accumulated_data_builder.nr @@ -2,12 +2,12 @@ use crate::{ abis::{ gas::Gas, accumulated_data::public_accumulated_data::PublicAccumulatedData, 
call_request::CallRequest, note_hash::NoteHash, nullifier::Nullifier, - public_data_update_request::PublicDataUpdateRequest, side_effect::SideEffect + public_data_update_request::PublicDataUpdateRequest, log_hash::{LogHash, NoteLogHash} }, constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, MAX_NEW_L2_TO_L1_MSGS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, MAX_ENCRYPTED_LOGS_PER_TX, - MAX_UNENCRYPTED_LOGS_PER_TX + MAX_UNENCRYPTED_LOGS_PER_TX, MAX_NOTE_ENCRYPTED_LOGS_PER_TX }, traits::Empty }; @@ -17,8 +17,9 @@ struct PublicAccumulatedDataBuilder { new_nullifiers: BoundedVec, new_l2_to_l1_msgs: BoundedVec, - encrypted_logs_hashes: BoundedVec, - unencrypted_logs_hashes: BoundedVec, + note_encrypted_logs_hashes: BoundedVec, + encrypted_logs_hashes: BoundedVec, + unencrypted_logs_hashes: BoundedVec, // Here so that the gas cost of this request can be measured by circuits, without actually needing to feed in the // variable-length data. @@ -38,6 +39,7 @@ impl PublicAccumulatedDataBuilder { new_note_hashes: self.new_note_hashes.storage, new_nullifiers: self.new_nullifiers.storage, new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage, + note_encrypted_logs_hashes: self.note_encrypted_logs_hashes.storage, encrypted_logs_hashes: self.encrypted_logs_hashes.storage, unencrypted_logs_hashes: self.unencrypted_logs_hashes.storage, encrypted_log_preimages_length: self.encrypted_log_preimages_length, @@ -55,6 +57,7 @@ impl Empty for PublicAccumulatedDataBuilder { new_note_hashes: BoundedVec::new(), new_nullifiers: BoundedVec::new(), new_l2_to_l1_msgs: BoundedVec::new(), + note_encrypted_logs_hashes: BoundedVec::new(), encrypted_logs_hashes: BoundedVec::new(), unencrypted_logs_hashes: BoundedVec::new(), encrypted_log_preimages_length: 0, diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/log_hash.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/log_hash.nr new file mode 100644 index 00000000000..a88bf41fa5c --- /dev/null +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/log_hash.nr @@ -0,0 +1,126 @@ +use crate::{ + abis::side_effect::{Ordered, OrderedValue}, constants::{LOG_HASH_LENGTH, NOTE_LOG_HASH_LENGTH}, + traits::{Empty, Serialize, Deserialize} +}; + +struct LogHash { + value: Field, + counter: u32, + length: Field, +} + +impl Ordered for LogHash { + fn counter(self) -> u32 { + self.counter + } +} + +impl OrderedValue for LogHash { + fn value(self) -> Field { + self.value + } + fn counter(self) -> u32 { + self.counter + } +} + +impl Eq for LogHash { + fn eq(self, other: LogHash) -> bool { + (self.value == other.value) + & (self.counter == other.counter) + & (self.length == other.length) + } +} + +impl Empty for LogHash { + fn empty() -> Self { + LogHash { + value: 0, + counter: 0, + length: 0, + } + } +} + +impl Serialize for LogHash { + fn serialize(self) -> [Field; LOG_HASH_LENGTH] { + [self.value, self.counter as Field, self.length] + } +} + +impl Deserialize for LogHash { + fn deserialize(values: [Field; LOG_HASH_LENGTH]) -> Self { + Self { + value: values[0], + counter: values[1] as u32, + length: values[2], + } + } +} + +struct NoteLogHash { + value: Field, + counter: u32, + length: Field, + note_hash_counter: u32, +} + +impl NoteLogHash { + pub fn expose_to_public(self) -> NoteLogHash { + // Hide the actual counter and note hash counter when exposing it to the public kernel. + // The counter is usually note_hash.counter + 1, so it can be revealing. 
+ NoteLogHash { value: self.value, counter: 0, length: self.length, note_hash_counter: 0 } + } +} + +impl Ordered for NoteLogHash { + fn counter(self) -> u32 { + self.counter + } +} + +impl OrderedValue for NoteLogHash { + fn value(self) -> Field { + self.value + } + fn counter(self) -> u32 { + self.counter + } +} + +impl Eq for NoteLogHash { + fn eq(self, other: NoteLogHash) -> bool { + (self.value == other.value) + & (self.counter == other.counter) + & (self.length == other.length) + & (self.note_hash_counter == other.note_hash_counter) + } +} + +impl Empty for NoteLogHash { + fn empty() -> Self { + NoteLogHash { + value: 0, + counter: 0, + length: 0, + note_hash_counter: 0, + } + } +} + +impl Serialize for NoteLogHash { + fn serialize(self) -> [Field; NOTE_LOG_HASH_LENGTH] { + [self.value, self.counter as Field, self.length, self.note_hash_counter as Field] + } +} + +impl Deserialize for NoteLogHash { + fn deserialize(values: [Field; NOTE_LOG_HASH_LENGTH]) -> Self { + Self { + value: values[0], + counter: values[1] as u32, + length: values[2], + note_hash_counter: values[3] as u32, + } + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_call_stack_item.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_call_stack_item.nr index e3d17abef66..480d95eacd6 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_call_stack_item.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_call_stack_item.nr @@ -85,6 +85,6 @@ fn empty_hash() { let hash = item.hash(); // Value from private_call_stack_item.test.ts "computes empty item hash" test - let test_data_empty_hash = 0x138c6ad441864ce43487e99d5e1e122c38b4b55d893edec04a32f5aacecc856c; + let test_data_empty_hash = 0x11e550264f1840bab424389aa41ed8a5735c0aa8f94e41bd259caab964ff93dc; assert_eq(hash, test_data_empty_hash); } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr index fd62b5efee9..01b22811e12 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr @@ -2,7 +2,7 @@ use crate::{ abis::{ call_context::CallContext, max_block_number::MaxBlockNumber, gas_settings::GasSettings, nullifier_key_validation_request::NullifierKeyValidationRequest, note_hash::NoteHash, - nullifier::Nullifier, read_request::ReadRequest, side_effect::SideEffect + nullifier::Nullifier, read_request::ReadRequest, log_hash::{LogHash, NoteLogHash} }, constants::{ MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, MAX_NULLIFIER_READ_REQUESTS_PER_CALL, @@ -10,7 +10,7 @@ use crate::{ MAX_NEW_NULLIFIERS_PER_CALL, MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, MAX_NEW_L2_TO_L1_MSGS_PER_CALL, PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH, GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS, - MAX_ENCRYPTED_LOGS_PER_CALL, MAX_UNENCRYPTED_LOGS_PER_CALL + MAX_ENCRYPTED_LOGS_PER_CALL, MAX_UNENCRYPTED_LOGS_PER_CALL, MAX_NOTE_ENCRYPTED_LOGS_PER_CALL }, header::Header, hash::pedersen_hash, messaging::l2_to_l1_message::L2ToL1Message, traits::{Deserialize, Hash, Serialize, Empty}, utils::reader::Reader, @@ -41,8 +41,9 @@ struct PrivateCircuitPublicInputs { start_side_effect_counter : u32, end_side_effect_counter : u32, - encrypted_logs_hashes: [SideEffect; MAX_ENCRYPTED_LOGS_PER_CALL], - 
unencrypted_logs_hashes: [SideEffect; MAX_UNENCRYPTED_LOGS_PER_CALL], + note_encrypted_logs_hashes: [NoteLogHash; MAX_NOTE_ENCRYPTED_LOGS_PER_CALL], + encrypted_logs_hashes: [LogHash; MAX_ENCRYPTED_LOGS_PER_CALL], + unencrypted_logs_hashes: [LogHash; MAX_UNENCRYPTED_LOGS_PER_CALL], // Here so that the gas cost of this request can be measured by circuits, without actually needing to feed in the // variable-length data. @@ -76,6 +77,7 @@ impl Eq for PrivateCircuitPublicInputs { (self.new_l2_to_l1_msgs == other.new_l2_to_l1_msgs) & (self.start_side_effect_counter == other.start_side_effect_counter) & (self.end_side_effect_counter == other.end_side_effect_counter) & + (self.note_encrypted_logs_hashes == other.note_encrypted_logs_hashes) & (self.encrypted_logs_hashes == other.encrypted_logs_hashes) & (self.unencrypted_logs_hashes == other.unencrypted_logs_hashes) & (self.encrypted_log_preimages_length == other.encrypted_log_preimages_length) & @@ -120,6 +122,9 @@ impl Serialize for PrivateCircuitPublicInp } fields.push(self.start_side_effect_counter as Field); fields.push(self.end_side_effect_counter as Field); + for i in 0..self.note_encrypted_logs_hashes.len() { + fields.extend_from_array(self.note_encrypted_logs_hashes[i].serialize()); + } for i in 0..self.encrypted_logs_hashes.len() { fields.extend_from_array(self.encrypted_logs_hashes[i].serialize()); } @@ -159,8 +164,9 @@ impl Deserialize for PrivateCircuitPublicI new_l2_to_l1_msgs: reader.read_struct_array(L2ToL1Message::deserialize, [L2ToL1Message::empty(); MAX_NEW_L2_TO_L1_MSGS_PER_CALL]), start_side_effect_counter: reader.read() as u32, end_side_effect_counter: reader.read() as u32, - encrypted_logs_hashes: reader.read_struct_array(SideEffect::deserialize, [SideEffect::empty(); MAX_ENCRYPTED_LOGS_PER_CALL]), - unencrypted_logs_hashes: reader.read_struct_array(SideEffect::deserialize, [SideEffect::empty(); MAX_UNENCRYPTED_LOGS_PER_CALL]), + note_encrypted_logs_hashes: reader.read_struct_array(NoteLogHash::deserialize, [NoteLogHash::empty(); MAX_NOTE_ENCRYPTED_LOGS_PER_CALL]), + encrypted_logs_hashes: reader.read_struct_array(LogHash::deserialize, [LogHash::empty(); MAX_ENCRYPTED_LOGS_PER_CALL]), + unencrypted_logs_hashes: reader.read_struct_array(LogHash::deserialize, [LogHash::empty(); MAX_UNENCRYPTED_LOGS_PER_CALL]), encrypted_log_preimages_length: reader.read(), unencrypted_log_preimages_length: reader.read(), historical_header: reader.read_struct(Header::deserialize), @@ -198,8 +204,9 @@ impl Empty for PrivateCircuitPublicInputs { new_l2_to_l1_msgs: [L2ToL1Message::empty(); MAX_NEW_L2_TO_L1_MSGS_PER_CALL], start_side_effect_counter : 0 as u32, end_side_effect_counter : 0 as u32, - encrypted_logs_hashes: [SideEffect::empty(); MAX_ENCRYPTED_LOGS_PER_CALL], - unencrypted_logs_hashes: [SideEffect::empty(); MAX_UNENCRYPTED_LOGS_PER_CALL], + note_encrypted_logs_hashes: [NoteLogHash::empty(); MAX_NOTE_ENCRYPTED_LOGS_PER_CALL], + encrypted_logs_hashes: [LogHash::empty(); MAX_ENCRYPTED_LOGS_PER_CALL], + unencrypted_logs_hashes: [LogHash::empty(); MAX_UNENCRYPTED_LOGS_PER_CALL], encrypted_log_preimages_length: 0, unencrypted_log_preimages_length: 0, historical_header: Header::empty(), @@ -221,6 +228,6 @@ fn empty_hash() { let inputs = PrivateCircuitPublicInputs::empty(); let hash = inputs.hash(); // Value from private_circuit_public_inputs.test.ts "computes empty item hash" test - let test_data_empty_hash = 0x2517b9a84487bde68e18647e59530c6ffe4a7a88c5c556f013d09fd22b84ba35; + let test_data_empty_hash = 
0x067b9bd773ae49145e07b395da4f156fb35972e77bd4c40ed980ea8c9b90dd64; assert_eq(hash, test_data_empty_hash); } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_call_stack_item.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_call_stack_item.nr index 8c24533dd6b..6746118086d 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_call_stack_item.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_call_stack_item.nr @@ -69,7 +69,7 @@ mod tests { let call_stack_item = PublicCallStackItem { contract_address, public_inputs, is_execution_request: true, function_data }; // Value from public_call_stack_item.test.ts "Computes a callstack item request hash" test - let test_data_call_stack_item_request_hash = 0x1177a69fbc37f0ebdf290025414ff72504497840f174896bd427d0f30ec21c55; + let test_data_call_stack_item_request_hash = 0x11998b1d33b8ba1c8fa7a6c2f5bc76b31bbaa80400554465c335ba31559ac1f9; assert_eq(call_stack_item.hash(), test_data_call_stack_item_request_hash); } @@ -87,7 +87,7 @@ mod tests { let call_stack_item = PublicCallStackItem { contract_address, public_inputs, is_execution_request: false, function_data }; // Value from public_call_stack_item.test.ts "Computes a callstack item hash" test - let test_data_call_stack_item_hash = 0x0f7624c0d5ea65fcec318c4d34cb3fcbf9c67435aebbf1548b3c90ef641424f8; + let test_data_call_stack_item_hash = 0x2b7f8b68d96d0011ecc576459899e9451fbd880568ccc7a071d9cf04e59abb65; assert_eq(call_stack_item.hash(), test_data_call_stack_item_hash); } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_circuit_public_inputs.nr index aafdd024ec8..7a0a61119ad 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_circuit_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_circuit_public_inputs.nr @@ -1,7 +1,7 @@ use crate::{ abis::{ call_context::CallContext, note_hash::NoteHash, nullifier::Nullifier, read_request::ReadRequest, - side_effect::SideEffect, gas::Gas, global_variables::GlobalVariables + gas::Gas, global_variables::GlobalVariables, log_hash::LogHash }, address::AztecAddress, constants::{ @@ -36,7 +36,7 @@ struct PublicCircuitPublicInputs { start_side_effect_counter: u32, end_side_effect_counter: u32, - unencrypted_logs_hashes: [SideEffect; MAX_UNENCRYPTED_LOGS_PER_CALL], + unencrypted_logs_hashes: [LogHash; MAX_UNENCRYPTED_LOGS_PER_CALL], // Here so that the gas cost of this request can be measured by circuits, without actually needing to feed in the // variable-length data. 
@@ -130,7 +130,7 @@ impl Deserialize for PublicCircuitPublicInp new_l2_to_l1_msgs: reader.read_struct_array(L2ToL1Message::deserialize, [L2ToL1Message::empty(); MAX_NEW_L2_TO_L1_MSGS_PER_CALL]), start_side_effect_counter: reader.read() as u32, end_side_effect_counter: reader.read() as u32, - unencrypted_logs_hashes: reader.read_struct_array(SideEffect::deserialize, [SideEffect::empty(); MAX_UNENCRYPTED_LOGS_PER_CALL]), + unencrypted_logs_hashes: reader.read_struct_array(LogHash::deserialize, [LogHash::empty(); MAX_UNENCRYPTED_LOGS_PER_CALL]), unencrypted_log_preimages_length: reader.read(), historical_header: reader.read_struct(Header::deserialize), global_variables: reader.read_struct(GlobalVariables::deserialize), @@ -168,7 +168,7 @@ impl Empty for PublicCircuitPublicInputs { new_l2_to_l1_msgs: [L2ToL1Message::empty(); MAX_NEW_L2_TO_L1_MSGS_PER_CALL], start_side_effect_counter: 0 as u32, end_side_effect_counter: 0 as u32, - unencrypted_logs_hashes: [SideEffect::empty(); MAX_UNENCRYPTED_LOGS_PER_CALL], + unencrypted_logs_hashes: [LogHash::empty(); MAX_UNENCRYPTED_LOGS_PER_CALL], unencrypted_log_preimages_length: 0, historical_header: Header::empty(), global_variables: GlobalVariables::empty(), @@ -195,6 +195,6 @@ fn empty_hash() { let hash = inputs.hash(); // Value from public_circuit_public_inputs.test.ts "computes empty item hash" test - let test_data_empty_hash = 0x132559f41b7adc7388e0cd52b91fd6837c296b2f9ec1b6d2ed046f7a56db18f8; + let test_data_empty_hash = 0x1e4351db0c9aa20836e7009bc3e6a4555c92622c5e9cb3b49e2ec0fbbf59d0bd; assert_eq(hash, test_data_empty_hash); } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index 1799f1e176a..4b2296ba3e5 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -30,9 +30,10 @@ global MAX_NEW_L2_TO_L1_MSGS_PER_CALL: u64 = 2; global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL: u64 = 16; global MAX_PUBLIC_DATA_READS_PER_CALL: u64 = 16; global MAX_NOTE_HASH_READ_REQUESTS_PER_CALL: u64 = 32; -global MAX_NULLIFIER_READ_REQUESTS_PER_CALL: u64 = 2; // Change it to a larger value when there's a seperate reset circuit. -global MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL: u64 = 2; -global MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL: u64 = 1; +global MAX_NULLIFIER_READ_REQUESTS_PER_CALL: u64 = 32; +global MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL: u64 = 32; +global MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL: u64 = 16; +global MAX_NOTE_ENCRYPTED_LOGS_PER_CALL: u64 = 16; global MAX_ENCRYPTED_LOGS_PER_CALL: u64 = 4; // If modifying, update DEPLOYER_CONTRACT_ADDRESS. global MAX_UNENCRYPTED_LOGS_PER_CALL: u64 = 4; // If modifying, update DEPLOYER_CONTRACT_ADDRESS. @@ -45,9 +46,10 @@ global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX: u64 = 32; global MAX_PUBLIC_DATA_READS_PER_TX: u64 = 32; global MAX_NEW_L2_TO_L1_MSGS_PER_TX: u64 = 2; global MAX_NOTE_HASH_READ_REQUESTS_PER_TX: u64 = 128; -global MAX_NULLIFIER_READ_REQUESTS_PER_TX: u64 = 8; // Change it to a larger value when there's a seperate reset circuit. 
-global MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX: u64 = 8; -global MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX: u64 = 4; +global MAX_NULLIFIER_READ_REQUESTS_PER_TX: u64 = 128; +global MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX: u64 = 128; +global MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX: u64 = 64; +global MAX_NOTE_ENCRYPTED_LOGS_PER_TX: u64 = 64; global MAX_ENCRYPTED_LOGS_PER_TX: u64 = 8; global MAX_UNENCRYPTED_LOGS_PER_TX: u64 = 8; global NUM_ENCRYPTED_LOGS_HASHES_PER_TX: u64 = 1; @@ -100,11 +102,9 @@ global MAX_ARGS_LENGTH: u64 = ARGS_HASH_CHUNK_COUNT * ARGS_HASH_CHUNK_LENGTH; global INITIALIZATION_SLOT_SEPARATOR: Field = 1000_000_000; global INITIAL_L2_BLOCK_NUM: Field = 1; global BLOB_SIZE_IN_BYTES: Field = 31 * 4096; -// How much gas is subtracted from L2GASLEFT when making a nested public call by default in the AVM -global NESTED_CALL_L2_GAS_BUFFER = 20000; // CONTRACT CLASS CONSTANTS -global MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS: u64 = 32000; +global MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS: u64 = 20000; // Bytecode size for private functions is per function, not for the entire contract. // Note that private functions bytecode includes a mix of acir and brillig. global MAX_PACKED_BYTECODE_SIZE_PER_PRIVATE_FUNCTION_IN_FIELDS: u64 = 3000; @@ -162,6 +162,8 @@ global NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = 3; global SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH + 1; global PARTIAL_STATE_REFERENCE_LENGTH: u64 = 6; global READ_REQUEST_LENGTH = 2; +global LOG_HASH_LENGTH = 3; +global NOTE_LOG_HASH_LENGTH = 4; global NOTE_HASH_LENGTH = 2; global SCOPED_NOTE_HASH_LENGTH = NOTE_HASH_LENGTH + 2; global NULLIFIER_LENGTH = 3; @@ -172,8 +174,8 @@ global STATE_REFERENCE_LENGTH: u64 = APPEND_ONLY_TREE_SNAPSHOT_LENGTH + PARTIAL_ global TX_CONTEXT_LENGTH: u64 = 2 + GAS_SETTINGS_LENGTH; global TX_REQUEST_LENGTH: u64 = 2 + TX_CONTEXT_LENGTH + FUNCTION_DATA_LENGTH; global HEADER_LENGTH: u64 = APPEND_ONLY_TREE_SNAPSHOT_LENGTH + CONTENT_COMMITMENT_LENGTH + STATE_REFERENCE_LENGTH + GLOBAL_VARIABLES_LENGTH; -global PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH: u64 = CALL_CONTEXT_LENGTH + 4 + MAX_BLOCK_NUMBER_LENGTH + (READ_REQUEST_LENGTH * MAX_NOTE_HASH_READ_REQUESTS_PER_CALL) + (READ_REQUEST_LENGTH * MAX_NULLIFIER_READ_REQUESTS_PER_CALL) + (NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH * MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL) + (NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_CALL) + (NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_CALL) + MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL + MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL + 1 + (L2_TO_L1_MESSAGE_LENGTH * MAX_NEW_L2_TO_L1_MSGS_PER_CALL) + 2 + (SIDE_EFFECT_LENGTH * MAX_ENCRYPTED_LOGS_PER_CALL) + (SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL) + 2 + HEADER_LENGTH + TX_CONTEXT_LENGTH; -global PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH: u64 = CALL_CONTEXT_LENGTH + 2 + (READ_REQUEST_LENGTH * MAX_NULLIFIER_READ_REQUESTS_PER_CALL) + (READ_REQUEST_LENGTH * MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL) + (CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH * MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL) + (CONTRACT_STORAGE_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_CALL) + MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL + (NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_CALL) + (NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_CALL) + (L2_TO_L1_MESSAGE_LENGTH * MAX_NEW_L2_TO_L1_MSGS_PER_CALL) + 2 + (SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL) + 1 + HEADER_LENGTH + GLOBAL_VARIABLES_LENGTH + AZTEC_ADDRESS_LENGTH + /* revert_code */ 1 + 2 * GAS_LENGTH + 
/* transaction_fee */ 1; +global PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH: u64 = CALL_CONTEXT_LENGTH + 4 + MAX_BLOCK_NUMBER_LENGTH + (READ_REQUEST_LENGTH * MAX_NOTE_HASH_READ_REQUESTS_PER_CALL) + (READ_REQUEST_LENGTH * MAX_NULLIFIER_READ_REQUESTS_PER_CALL) + (NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH * MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL) + (NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_CALL) + (NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_CALL) + MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL + MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL + 1 + (L2_TO_L1_MESSAGE_LENGTH * MAX_NEW_L2_TO_L1_MSGS_PER_CALL) + 2 + (NOTE_LOG_HASH_LENGTH * MAX_NOTE_ENCRYPTED_LOGS_PER_CALL) + (LOG_HASH_LENGTH * MAX_ENCRYPTED_LOGS_PER_CALL) + (LOG_HASH_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL) + 2 + HEADER_LENGTH + TX_CONTEXT_LENGTH; +global PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH: u64 = CALL_CONTEXT_LENGTH + 2 + (READ_REQUEST_LENGTH * MAX_NULLIFIER_READ_REQUESTS_PER_CALL) + (READ_REQUEST_LENGTH * MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL) + (CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH * MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL) + (CONTRACT_STORAGE_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_CALL) + MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL + (NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_CALL) + (NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_CALL) + (L2_TO_L1_MESSAGE_LENGTH * MAX_NEW_L2_TO_L1_MSGS_PER_CALL) + 2 + (LOG_HASH_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL) + 1 + HEADER_LENGTH + GLOBAL_VARIABLES_LENGTH + AZTEC_ADDRESS_LENGTH + /* revert_code */ 1 + 2 * GAS_LENGTH + /* transaction_fee */ 1; global PRIVATE_CALL_STACK_ITEM_LENGTH: u64 = AZTEC_ADDRESS_LENGTH + FUNCTION_DATA_LENGTH + PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH; global PUBLIC_CONTEXT_INPUTS_LENGTH: u64 = CALL_CONTEXT_LENGTH + HEADER_LENGTH + GLOBAL_VARIABLES_LENGTH + GAS_LENGTH + 2; @@ -182,15 +184,15 @@ global PUBLIC_DATA_READ_LENGTH = 2; global VALIDATION_REQUESTS_LENGTH = ROLLUP_VALIDATION_REQUESTS_LENGTH + (SCOPED_READ_REQUEST_LEN * MAX_NOTE_HASH_READ_REQUESTS_PER_TX) + (SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_READ_REQUESTS_PER_TX) + (SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX) + (SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH * MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX) + (PUBLIC_DATA_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_TX); global PUBLIC_DATA_UPDATE_REQUEST_LENGTH = 2; -global COMBINED_ACCUMULATED_DATA_LENGTH = MAX_NEW_NOTE_HASHES_PER_TX + MAX_NEW_NULLIFIERS_PER_TX + MAX_NEW_L2_TO_L1_MSGS_PER_TX + 4 + (MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * PUBLIC_DATA_UPDATE_REQUEST_LENGTH) + GAS_LENGTH; +global COMBINED_ACCUMULATED_DATA_LENGTH = MAX_NEW_NOTE_HASHES_PER_TX + MAX_NEW_NULLIFIERS_PER_TX + MAX_NEW_L2_TO_L1_MSGS_PER_TX + 5 + (MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * PUBLIC_DATA_UPDATE_REQUEST_LENGTH) + GAS_LENGTH; global COMBINED_CONSTANT_DATA_LENGTH = HEADER_LENGTH + TX_CONTEXT_LENGTH + GLOBAL_VARIABLES_LENGTH; global CALLER_CONTEXT_LENGTH = 2 * AZTEC_ADDRESS_LENGTH; global CALL_REQUEST_LENGTH = 1 + AZTEC_ADDRESS_LENGTH + CALLER_CONTEXT_LENGTH + 2; -global PRIVATE_ACCUMULATED_DATA_LENGTH = (SCOPED_NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_TX) + (SCOPED_NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_TX) + (MAX_NEW_L2_TO_L1_MSGS_PER_TX * SCOPED_L2_TO_L1_MESSAGE_LENGTH) + (SIDE_EFFECT_LENGTH * MAX_ENCRYPTED_LOGS_PER_TX) + (SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_TX) + 2 + (CALL_REQUEST_LENGTH * MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX) + (CALL_REQUEST_LENGTH * MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX); +global PRIVATE_ACCUMULATED_DATA_LENGTH = 
(SCOPED_NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_TX) + (SCOPED_NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_TX) + (MAX_NEW_L2_TO_L1_MSGS_PER_TX * SCOPED_L2_TO_L1_MESSAGE_LENGTH) + (NOTE_LOG_HASH_LENGTH * MAX_NOTE_ENCRYPTED_LOGS_PER_TX) + (LOG_HASH_LENGTH * MAX_ENCRYPTED_LOGS_PER_TX) + (LOG_HASH_LENGTH * MAX_UNENCRYPTED_LOGS_PER_TX) + 2 + (CALL_REQUEST_LENGTH * MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX) + (CALL_REQUEST_LENGTH * MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX); global PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 1 + VALIDATION_REQUESTS_LENGTH + PRIVATE_ACCUMULATED_DATA_LENGTH + COMBINED_CONSTANT_DATA_LENGTH + CALL_REQUEST_LENGTH + AZTEC_ADDRESS_LENGTH; -global PUBLIC_ACCUMULATED_DATA_LENGTH = (MAX_NEW_NOTE_HASHES_PER_TX * NOTE_HASH_LENGTH) + (MAX_NEW_NULLIFIERS_PER_TX * NULLIFIER_LENGTH) + (MAX_NEW_L2_TO_L1_MSGS_PER_TX * 1) + (MAX_ENCRYPTED_LOGS_PER_TX * SIDE_EFFECT_LENGTH) + (MAX_UNENCRYPTED_LOGS_PER_TX * SIDE_EFFECT_LENGTH) + 2 + (MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * PUBLIC_DATA_UPDATE_REQUEST_LENGTH) + (MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX * CALL_REQUEST_LENGTH) + GAS_LENGTH; +global PUBLIC_ACCUMULATED_DATA_LENGTH = (MAX_NEW_NOTE_HASHES_PER_TX * NOTE_HASH_LENGTH) + (MAX_NEW_NULLIFIERS_PER_TX * NULLIFIER_LENGTH) + (MAX_NEW_L2_TO_L1_MSGS_PER_TX * 1) + (NOTE_LOG_HASH_LENGTH * MAX_NOTE_ENCRYPTED_LOGS_PER_TX) + (MAX_ENCRYPTED_LOGS_PER_TX * LOG_HASH_LENGTH) + (MAX_UNENCRYPTED_LOGS_PER_TX * LOG_HASH_LENGTH) + 2 + (MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * PUBLIC_DATA_UPDATE_REQUEST_LENGTH) + (MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX * CALL_REQUEST_LENGTH) + GAS_LENGTH; global PUBLIC_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = VALIDATION_REQUESTS_LENGTH + PUBLIC_ACCUMULATED_DATA_LENGTH + PUBLIC_ACCUMULATED_DATA_LENGTH + COMBINED_CONSTANT_DATA_LENGTH + 1 + (MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX * CALL_REQUEST_LENGTH) + AZTEC_ADDRESS_LENGTH; global KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = ROLLUP_VALIDATION_REQUESTS_LENGTH + COMBINED_ACCUMULATED_DATA_LENGTH + COMBINED_CONSTANT_DATA_LENGTH + PARTIAL_STATE_REFERENCE_LENGTH + 1 + AZTEC_ADDRESS_LENGTH; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/grumpkin_point.nr b/noir-projects/noir-protocol-circuits/crates/types/src/grumpkin_point.nr index 467a022947b..a7caaa39751 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/grumpkin_point.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/grumpkin_point.nr @@ -1,4 +1,4 @@ -use crate::traits::{Serialize, Deserialize}; +use crate::{traits::{Serialize, Deserialize, Hash}, hash::poseidon2_hash}; use dep::std::cmp::Eq; global GRUMPKIN_POINT_SERIALIZED_LEN: Field = 2; @@ -30,6 +30,12 @@ impl Eq for GrumpkinPoint { } } +impl Hash for GrumpkinPoint { + fn hash(self) -> Field { + poseidon2_hash(self.serialize()) + } +} + impl GrumpkinPoint { pub fn new(x: Field, y: Field) -> Self { Self { x, y } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr b/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr index 6c3678b6bb3..c189a21d365 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr @@ -3,13 +3,13 @@ use crate::recursion::verification_key::VerificationKey; use crate::abis::function_selector::FunctionSelector; use crate::abis::contract_class_function_leaf_preimage::ContractClassFunctionLeafPreimage; use crate::contract_class_id::ContractClassId; -use crate::abis::side_effect::SideEffect; +use crate::abis::log_hash::{LogHash, NoteLogHash}; use 
crate::traits::is_empty;
use crate::utils::{uint256::U256, field::field_from_bytes_32_trunc};
use crate::constants::{
    FUNCTION_TREE_HEIGHT, GENERATOR_INDEX__SILOED_NOTE_HASH, GENERATOR_INDEX__OUTER_NULLIFIER,
    GENERATOR_INDEX__VK, GENERATOR_INDEX__NOTE_HASH_NONCE, GENERATOR_INDEX__UNIQUE_NOTE_HASH,
-    MAX_ENCRYPTED_LOGS_PER_TX
+    MAX_ENCRYPTED_LOGS_PER_TX, MAX_NOTE_ENCRYPTED_LOGS_PER_TX
};
use crate::traits::Hash;
use crate::messaging::l2_to_l1_message::L2ToL1Message;
@@ -142,7 +142,7 @@ pub fn accumulate_sha256(input: [Field; 2]) -> Field {
 // Computes the final logs hash for a tx.
 // NB: this assumes MAX_ENCRYPTED_LOGS_PER_TX == MAX_UNENCRYPTED_LOGS_PER_TX
 // to avoid doubling code, since we can't define the byte len to be 32*N directly.
-pub fn compute_tx_logs_hash(logs: [SideEffect; MAX_ENCRYPTED_LOGS_PER_TX]) -> Field {
+pub fn compute_tx_logs_hash(logs: [LogHash; MAX_ENCRYPTED_LOGS_PER_TX]) -> Field {
     // Convert each field element into a byte array and append the bytes to `hash_input_flattened`
     let mut hash_input_flattened = [0; MAX_ENCRYPTED_LOGS_PER_TX * 32];
     for offset in 0..MAX_ENCRYPTED_LOGS_PER_TX {
@@ -163,6 +163,27 @@ pub fn compute_tx_logs_hash(logs: [SideEffect; MAX_ENCRYPTED_LOGS_PER_TX]) -> Fi
     hash
 }
 
+pub fn compute_tx_note_logs_hash(logs: [NoteLogHash; MAX_NOTE_ENCRYPTED_LOGS_PER_TX]) -> Field {
+    // Convert each field element into a byte array and append the bytes to `hash_input_flattened`
+    let mut hash_input_flattened = [0; MAX_NOTE_ENCRYPTED_LOGS_PER_TX * 32];
+    for offset in 0..MAX_NOTE_ENCRYPTED_LOGS_PER_TX {
+        let input_as_bytes = logs[offset].value.to_be_bytes(32);
+        for byte_index in 0..32 {
+            hash_input_flattened[offset * 32 + byte_index] = input_as_bytes[byte_index];
+        }
+    }
+    // Ideally we would push to a slice then hash, but there is no sha_slice
+    // Hardcode to MAX_NOTE_ENCRYPTED_LOGS_PER_TX * 32 bytes for now
+    let mut hash = sha256_to_field(hash_input_flattened);
+    // Not having a 0 value hash for empty logs causes issues with empty txs
+    // used for padding. Returning early is currently unsupported.
+    // We always provide sorted logs here, so 0 being empty means all are empty.
+ if is_empty(logs[0]) { + hash = 0; + } + hash +} + pub fn pedersen_hash(inputs: [Field; N], hash_index: u32) -> Field { dep::std::hash::pedersen_hash_with_separator(inputs, hash_index) } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr index a1d2159d279..c27429ffbf2 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr @@ -10,7 +10,7 @@ use crate::{ nullifier::{Nullifier, ScopedNullifier}, nullifier_key_validation_request::{ScopedNullifierKeyValidationRequest, NullifierKeyValidationRequest}, public_data_read::PublicDataRead, public_data_update_request::PublicDataUpdateRequest, - read_request::{ReadRequest, ScopedReadRequest}, side_effect::SideEffect, + read_request::{ReadRequest, ScopedReadRequest}, log_hash::{LogHash, NoteLogHash}, validation_requests::{ValidationRequests, ValidationRequestsBuilder} }, address::{AztecAddress, EthAddress}, @@ -20,7 +20,8 @@ use crate::{ MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX, MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, - VK_TREE_HEIGHT, MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX + VK_TREE_HEIGHT, MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX, + MAX_NOTE_ENCRYPTED_LOGS_PER_TX }, hash::silo_nullifier, header::Header, messaging::l2_to_l1_message::{L2ToL1Message, ScopedL2ToL1Message}, @@ -44,8 +45,10 @@ struct FixtureBuilder { new_note_hashes: BoundedVec, new_nullifiers: BoundedVec, new_l2_to_l1_msgs: BoundedVec, - encrypted_logs_hashes: BoundedVec, - unencrypted_logs_hashes: BoundedVec, + note_encrypted_logs_hashes: BoundedVec, + encrypted_logs_hashes: BoundedVec, + unencrypted_logs_hashes: BoundedVec, + note_encrypted_logs_hash: Field, encrypted_logs_hash: Field, unencrypted_logs_hash: Field, encrypted_log_preimages_length: Field, @@ -92,8 +95,10 @@ impl FixtureBuilder { new_note_hashes: BoundedVec::new(), new_nullifiers: BoundedVec::new(), new_l2_to_l1_msgs: BoundedVec::new(), + note_encrypted_logs_hashes: BoundedVec::new(), encrypted_logs_hashes: BoundedVec::new(), unencrypted_logs_hashes: BoundedVec::new(), + note_encrypted_logs_hash: 0, encrypted_logs_hash: 0, unencrypted_logs_hash: 0, encrypted_log_preimages_length: 0, @@ -135,6 +140,7 @@ impl FixtureBuilder { new_note_hashes: self.new_note_hashes, new_nullifiers: self.new_nullifiers, new_l2_to_l1_msgs: self.new_l2_to_l1_msgs, + note_encrypted_logs_hashes: self.note_encrypted_logs_hashes, encrypted_logs_hashes: self.encrypted_logs_hashes, unencrypted_logs_hashes: self.unencrypted_logs_hashes, encrypted_log_preimages_length: self.encrypted_log_preimages_length, @@ -150,6 +156,7 @@ impl FixtureBuilder { new_note_hashes: self.new_note_hashes.storage.map(|n: ScopedNoteHash| n.note_hash), new_nullifiers: self.new_nullifiers.storage.map(|n: ScopedNullifier| n.nullifier), new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage.map(|m: ScopedL2ToL1Message| m.message.content), + note_encrypted_logs_hashes: self.note_encrypted_logs_hashes.storage, encrypted_logs_hashes: self.encrypted_logs_hashes.storage, unencrypted_logs_hashes: self.unencrypted_logs_hashes.storage, encrypted_log_preimages_length: self.encrypted_log_preimages_length, @@ -165,6 +172,7 @@ impl FixtureBuilder { new_note_hashes: 
self.new_note_hashes.storage.map(|n: ScopedNoteHash| n.note_hash.value), new_nullifiers: self.new_nullifiers.storage.map(|n: ScopedNullifier| n.nullifier.value), new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage.map(|m: ScopedL2ToL1Message| m.message.content), + note_encrypted_logs_hash: self.note_encrypted_logs_hash, encrypted_logs_hash: self.encrypted_logs_hash, unencrypted_logs_hash: self.unencrypted_logs_hash, encrypted_log_preimages_length: self.encrypted_log_preimages_length, @@ -275,12 +283,24 @@ impl FixtureBuilder { self.new_note_hashes.push(NoteHash { value, counter: self.next_counter() }.scope(0, self.storage_contract_address)); } - pub fn append_new_note_hashes(&mut self, num_new_note_hashes: u64) { + pub fn add_broadcast_new_note_hash(&mut self, value: Field) { + self.new_note_hashes.push(NoteHash { value, counter: self.next_counter() }.scope(0, self.storage_contract_address)); + self.note_encrypted_logs_hashes.push( + NoteLogHash { value: value + 1, counter: self.next_counter(), length: 64, note_hash_counter: self.counter - 2 } + ); + self.encrypted_log_preimages_length += 64; + } + + pub fn append_new_note_hashes(&mut self, num_new_note_hashes: u64, broadcast: bool) { let index_offset = self.new_note_hashes.len(); for i in 0..MAX_NEW_NOTE_HASHES_PER_TX { if i < num_new_note_hashes { let mocked_value = self.get_mocked_note_hash_value(index_offset + i); - self.add_new_note_hash(mocked_value); + if (broadcast) { + self.add_broadcast_new_note_hash(mocked_value); + } else { + self.add_new_note_hash(mocked_value); + } } } } @@ -408,13 +428,13 @@ impl FixtureBuilder { } pub fn set_encrypted_logs(&mut self, hash: Field, preimages_length: Field) { - let side_effect = SideEffect { value: hash, counter: self.next_counter() }; + let side_effect = LogHash { value: hash, counter: self.next_counter(), length: preimages_length }; self.encrypted_logs_hashes.push(side_effect); self.encrypted_log_preimages_length += preimages_length; } pub fn set_unencrypted_logs(&mut self, hash: Field, preimages_length: Field) { - let side_effect = SideEffect { value: hash, counter: self.next_counter() }; + let side_effect = LogHash { value: hash, counter: self.next_counter(), length: preimages_length }; self.unencrypted_logs_hashes.push(side_effect); self.unencrypted_log_preimages_length += preimages_length; } @@ -507,8 +527,10 @@ impl Empty for FixtureBuilder { new_note_hashes: BoundedVec::new(), new_nullifiers: BoundedVec::new(), new_l2_to_l1_msgs: BoundedVec::new(), + note_encrypted_logs_hashes: BoundedVec::new(), encrypted_logs_hashes: BoundedVec::new(), unencrypted_logs_hashes: BoundedVec::new(), + note_encrypted_logs_hash: 0, encrypted_logs_hash: 0, unencrypted_logs_hash: 0, encrypted_log_preimages_length: 0, diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr index 5268ac1eea6..c3d29483cca 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr @@ -3,7 +3,7 @@ use crate::{ gas_settings::GasSettings, call_request::{CallerContext, CallRequest}, private_call_stack_item::PrivateCallStackItem, function_data::FunctionData, max_block_number::MaxBlockNumber, private_circuit_public_inputs::PrivateCircuitPublicInputs, - private_kernel::private_call_data::PrivateCallData, side_effect::SideEffect + 
private_kernel::private_call_data::PrivateCallData, log_hash::LogHash }, merkle_tree::membership::MembershipWitness, address::{AztecAddress, EthAddress, SaltedInitializationHash, PublicKeysHash}, @@ -130,7 +130,7 @@ impl PrivateCallDataBuilder { } } - pub fn add_teaddown_call_request(&mut self, is_delegate_call: bool) { + pub fn add_teardown_call_request(&mut self, is_delegate_call: bool) { let hash = 909090; self.public_teardown_call_request = self.public_inputs.generate_call_request(hash, is_delegate_call); self.public_inputs.add_teardown_call_request(hash); diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_circuit_public_inputs_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_circuit_public_inputs_builder.nr index 20dd2efc32e..edcab97af36 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_circuit_public_inputs_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_circuit_public_inputs_builder.nr @@ -4,7 +4,7 @@ use crate::{ gas_settings::GasSettings, gas::Gas, max_block_number::MaxBlockNumber, note_hash::NoteHash, nullifier::Nullifier, nullifier_key_validation_request::NullifierKeyValidationRequest, private_circuit_public_inputs::PrivateCircuitPublicInputs, read_request::ReadRequest, - side_effect::SideEffect + log_hash::{LogHash, NoteLogHash} }, address::{AztecAddress, compute_initialization_hash}, header::Header, messaging::l2_to_l1_message::L2ToL1Message, tests::fixtures, transaction::tx_context::TxContext @@ -15,7 +15,7 @@ use crate::{ MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL, MAX_NEW_NOTE_HASHES_PER_CALL, MAX_NEW_NULLIFIERS_PER_CALL, MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, MAX_NEW_L2_TO_L1_MSGS_PER_CALL, MAX_ENCRYPTED_LOGS_PER_CALL, - MAX_UNENCRYPTED_LOGS_PER_CALL + MAX_UNENCRYPTED_LOGS_PER_CALL, MAX_NOTE_ENCRYPTED_LOGS_PER_CALL }, traits::Empty }; @@ -44,8 +44,9 @@ struct PrivateCircuitPublicInputsBuilder { public_teardown_function_hash: Field, new_l2_to_l1_msgs: BoundedVec, - encrypted_logs_hashes: BoundedVec, - unencrypted_logs_hashes: BoundedVec, + note_encrypted_logs_hashes: BoundedVec, + encrypted_logs_hashes: BoundedVec, + unencrypted_logs_hashes: BoundedVec, encrypted_log_preimages_length: Field, unencrypted_log_preimages_length: Field, @@ -153,13 +154,13 @@ impl PrivateCircuitPublicInputsBuilder { } pub fn add_encrypted_log(&mut self, hash: Field, preimages_length: Field) { - let side_effect = SideEffect { value: hash, counter: self.next_counter() }; + let side_effect = LogHash { value: hash, counter: self.next_counter(), length: preimages_length }; self.encrypted_logs_hashes.push(side_effect); self.encrypted_log_preimages_length += preimages_length; } pub fn add_unencrypted_log(&mut self, hash: Field, preimages_length: Field) { - let side_effect = SideEffect { value: hash, counter: self.next_counter() }; + let side_effect = LogHash { value: hash, counter: self.next_counter(), length: preimages_length }; self.unencrypted_logs_hashes.push(side_effect); self.unencrypted_log_preimages_length += preimages_length; } @@ -216,6 +217,7 @@ impl PrivateCircuitPublicInputsBuilder { new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage, start_side_effect_counter: self.counter_start, end_side_effect_counter: self.counter_end, + note_encrypted_logs_hashes: self.note_encrypted_logs_hashes.storage, encrypted_logs_hashes: self.encrypted_logs_hashes.storage, unencrypted_logs_hashes: self.unencrypted_logs_hashes.storage, 
encrypted_log_preimages_length: self.encrypted_log_preimages_length, @@ -251,6 +253,7 @@ impl Empty for PrivateCircuitPublicInputsBuilder { public_call_stack_hashes: BoundedVec::new(), public_teardown_function_hash: 0, new_l2_to_l1_msgs: BoundedVec::new(), + note_encrypted_logs_hashes: BoundedVec::new(), encrypted_logs_hashes: BoundedVec::new(), unencrypted_logs_hashes: BoundedVec::new(), encrypted_log_preimages_length: 0, diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/public_call_data_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/public_call_data_builder.nr index 40c379f0d7c..9f66f8cf62e 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/public_call_data_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/public_call_data_builder.nr @@ -3,7 +3,7 @@ use crate::{ gas_settings::GasSettings, gas::Gas, call_context::CallContext, call_request::{CallerContext, CallRequest}, function_data::FunctionData, public_call_data::PublicCallData, public_call_stack_item::PublicCallStackItem, - public_circuit_public_inputs::PublicCircuitPublicInputs, side_effect::SideEffect + public_circuit_public_inputs::PublicCircuitPublicInputs, log_hash::LogHash }, address::{AztecAddress, EthAddress}, contrakt::{storage_read::StorageRead, storage_update_request::StorageUpdateRequest}, mocked::Proof, @@ -145,7 +145,7 @@ impl PublicCallDataBuilder { pub fn set_unencrypted_logs(&mut self, hash: Field, preimages_length: Field) { // Counter set as 0 for testing, like note read requests - let side_effect = SideEffect { value: hash, counter: 0 }; + let side_effect = LogHash { value: hash, counter: 0, length: preimages_length }; self.public_inputs.unencrypted_logs_hashes.push(side_effect); self.public_inputs.unencrypted_log_preimages_length += preimages_length; } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/public_circuit_public_inputs_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/public_circuit_public_inputs_builder.nr index d18db5dca8b..0aba746f283 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/public_circuit_public_inputs_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/public_circuit_public_inputs_builder.nr @@ -2,7 +2,7 @@ use crate::{ abis::{ gas::Gas, call_context::CallContext, note_hash::NoteHash, nullifier::Nullifier, public_circuit_public_inputs::PublicCircuitPublicInputs, read_request::ReadRequest, - side_effect::SideEffect, global_variables::GlobalVariables + log_hash::LogHash, global_variables::GlobalVariables }, address::AztecAddress, contrakt::{storage_read::StorageRead, storage_update_request::StorageUpdateRequest}, header::Header, @@ -32,7 +32,7 @@ struct PublicCircuitPublicInputsBuilder { new_l2_to_l1_msgs: BoundedVec, start_side_effect_counter: u32, end_side_effect_counter: u32, - unencrypted_logs_hashes: BoundedVec, + unencrypted_logs_hashes: BoundedVec, unencrypted_log_preimages_length: Field, historical_header: Header, global_variables: GlobalVariables, diff --git a/noir/Earthfile b/noir/Earthfile index ddfb79bc5e3..9337b11452a 100644 --- a/noir/Earthfile +++ b/noir/Earthfile @@ -44,6 +44,21 @@ test: COPY noir-repo/.rustfmt.toml noir-repo/.rustfmt.toml RUN ./scripts/test_native.sh +examples: + FROM +nargo + ENV PATH="/usr/src/noir-repo/target/release:${PATH}" + + COPY --dir noir-repo/examples noir-repo + COPY ../barretenberg/cpp/+preset-clang-assert/bin/bb /usr/src/barretenberg/cpp/build/bin/bb + + ENV 
BACKEND=/usr/src/barretenberg/cpp/build/bin/bb
+
+    WORKDIR noir-repo/examples/codegen-verifier
+    RUN ./test.sh
+
+    WORKDIR ../prove_and_verify
+    RUN ./test.sh
+
 format:
     FROM +nargo
     ENV PATH=$PATH:/usr/src/noir-repo/target/release
diff --git a/noir/README.md b/noir/README.md
index 53bfd3139af..4a4eb261663 100644
--- a/noir/README.md
+++ b/noir/README.md
@@ -2,3 +2,32 @@
 We subrepo noir into the folder `noir-repo`.
 
 This folder contains dockerfiles and scripts for performing our custom build of noir for the monorepo.
+
+# Syncing with the main Noir repository
+
+In order to keep aztec-packages in step with the main Noir repository, we need to sync between them periodically.
+
+Syncing from aztec-packages into noir currently attempts to revert any changes in Noir since the last sync, so it's recommended to always sync from Noir first to ensure that aztec-packages is up-to-date.
+
+## Syncing from Noir to aztec-packages
+
+To start the sync, run [this action](https://github.com/AztecProtocol/aztec-packages/actions/workflows/pull-noir.yml) manually (click the "Run Workflow" button in the top right). aztec-bot will then open a new PR which does the initial sync; this will have merge conflicts with master which need to be resolved.
+
+Most of these will be due to simultaneous development in the two repositories, but a few are due to the sync process itself:
+1. Replace the dependency on `@aztec/bb.js` in `noir-lang/noir_js_backend_barretenberg` to use the version built in this repository:
+   a. To do this, search for instances of `"@aztec/bb.js":` within package.json files and replace the versions with `"portal:../../../../barretenberg/ts"` (the number of directories to go up may vary).
+2. Run `yarn install` in `noir/noir-repo` in order to update `yarn.lock`.
+3. Run a search and replace on `require_command wasm-opt` to `#require_command wasm-opt`.
+
+We need to do this because `noir-lang/noir` uses a fixed release of barretenberg, whereas in aztec-packages we test against the version of barretenberg built from the same commit.
+
+## Syncing from aztec-packages to Noir
+
+When syncing from aztec-packages to Noir, it's important to check that the latest release of `bb` uses the same ACIR serialization format as the current master commit. This is because Noir uses a released version of barretenberg rather than being developed in sync with it, so it is not possible to sync if there have been serialization changes since the last release.
+
+To start the sync, run [this action](https://github.com/AztecProtocol/aztec-packages/actions/workflows/mirror-noir-subrepo.yml) manually (click the "Run Workflow" button in the top right). aztec-bot will then open a new PR in the `noir-lang/noir` repository which does the initial sync; this will have merge conflicts with master which need to be resolved.
+
+Most of these will be due to simultaneous development in the two repositories, but a few are due to the sync process itself:
+1. Replace the dependency on `@aztec/bb.js` in `noir-lang/noir_js_backend_barretenberg` to use the latest `aztec-packages` release version.
+2. Run `yarn install` in order to update `yarn.lock`.
+3. 
Run a search and replace on `#require_command wasm-opt` to `require_command wasm-opt` \ No newline at end of file diff --git a/noir/noir-repo/.github/workflows/test-js-packages.yml b/noir/noir-repo/.github/workflows/test-js-packages.yml index e6098dd269c..c8a8be998e6 100644 --- a/noir/noir-repo/.github/workflows/test-js-packages.yml +++ b/noir/noir-repo/.github/workflows/test-js-packages.yml @@ -399,6 +399,11 @@ jobs: - name: Checkout uses: actions/checkout@v4 + - name: Download bb binary + run: | + # Adds `bb` to PATH + ./scripts/install_bb.sh + - name: Download nargo binary uses: actions/download-artifact@v4 with: diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index 859579c077f..63a40ee1320 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -451,26 +451,6 @@ dependencies = [ "regex", ] -[[package]] -name = "backend-interface" -version = "0.29.0" -dependencies = [ - "acvm", - "bb_abstraction_leaks", - "build-target", - "const_format", - "dirs", - "flate2", - "reqwest", - "serde", - "serde_json", - "tar", - "tempfile", - "test-binary", - "thiserror", - "tracing", -] - [[package]] name = "backtrace" version = "0.3.68" @@ -510,15 +490,6 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" -[[package]] -name = "bb_abstraction_leaks" -version = "0.11.0" -dependencies = [ - "acvm", - "build-target", - "const_format", -] - [[package]] name = "bincode" version = "1.3.3" @@ -681,12 +652,6 @@ dependencies = [ "safe-regex", ] -[[package]] -name = "build-target" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "832133bbabbbaa9fbdba793456a2827627a7d2b8fb96032fa1e7666d7895832b" - [[package]] name = "bumpalo" version = "3.13.0" @@ -1569,15 +1534,6 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" -[[package]] -name = "encoding_rs" -version = "0.8.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" -dependencies = [ - "cfg-if 1.0.0", -] - [[package]] name = "endian-type" version = "0.1.2" @@ -2012,25 +1968,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "h2" -version = "0.3.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" -dependencies = [ - "bytes", - "fnv", - "futures-core", - "futures-sink", - "futures-util", - "http", - "indexmap 2.0.0", - "slab", - "tokio", - "tokio-util 0.7.10", - "tracing", -] - [[package]] name = "half" version = "1.8.2" @@ -2150,7 +2087,6 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2", "http", "http-body", "httparse", @@ -2164,20 +2100,6 @@ dependencies = [ "want", ] -[[package]] -name = "hyper-rustls" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" -dependencies = [ - "futures-util", - "http", - "hyper", - "rustls", - "tokio", - "tokio-rustls", -] - [[package]] name = "iai" version = "0.1.1" @@ -2365,12 +2287,6 @@ dependencies = [ "cfg-if 1.0.0", ] -[[package]] -name = "ipnet" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" - [[package]] name = "is-terminal" version = "0.4.9" @@ -2769,12 +2685,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "mime" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" - [[package]] name = "miniz_oxide" version = "0.7.1" @@ -2847,7 +2757,6 @@ dependencies = [ "assert_cmd", "assert_fs", "async-lsp", - "backend-interface", "bn254_blackbox_solver", "build-data", "clap", @@ -2858,7 +2767,6 @@ dependencies = [ "dap", "dirs", "fm", - "hex", "iai", "iter-extended", "nargo", @@ -4019,45 +3927,6 @@ dependencies = [ "bytecheck", ] -[[package]] -name = "reqwest" -version = "0.11.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1" -dependencies = [ - "base64 0.21.2", - "bytes", - "encoding_rs", - "futures-core", - "futures-util", - "h2", - "http", - "http-body", - "hyper", - "hyper-rustls", - "ipnet", - "js-sys", - "log", - "mime", - "once_cell", - "percent-encoding 2.3.0", - "pin-project-lite", - "rustls", - "rustls-pemfile", - "serde", - "serde_json", - "serde_urlencoded", - "tokio", - "tokio-rustls", - "tower-service", - "url 2.4.0", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "webpki-roots", - "winreg", -] - [[package]] name = "rexpect" version = "0.5.0" @@ -4091,36 +3960,6 @@ dependencies = [ "bytemuck", ] -[[package]] -name = "ring" -version = "0.16.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" -dependencies = [ - "cc", - "libc", - "once_cell", - "spin 0.5.2", - "untrusted 0.7.1", - "web-sys", - "winapi", -] - -[[package]] -name = "ring" -version = "0.17.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" -dependencies = [ - "cc", - "cfg-if 1.0.0", - "getrandom 0.2.10", - "libc", - "spin 0.9.8", - "untrusted 0.9.0", - "windows-sys 0.52.0", -] - [[package]] name = "rkyv" version = "0.7.42" @@ -4218,37 +4057,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "rustls" -version = "0.21.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fecbfb7b1444f477b345853b1fce097a2c6fb637b2bfb87e6bc5db0f043fae4" -dependencies = [ - "log", - "ring 0.17.8", - "rustls-webpki", - "sct", -] - -[[package]] -name = "rustls-pemfile" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" -dependencies = [ - "base64 0.21.2", -] - -[[package]] -name = "rustls-webpki" -version = "0.101.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" -dependencies = [ - "ring 0.17.8", - "untrusted 0.9.0", -] - [[package]] name = "rustversion" version = "1.0.14" @@ -4381,16 +4189,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" -[[package]] -name = "sct" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" -dependencies = [ - "ring 0.16.20", - "untrusted 0.7.1", -] - [[package]] name = "seahash" version = "4.1.0" @@ -4522,18 +4320,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_urlencoded" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" -dependencies = [ - "form_urlencoded", - "itoa", - "ryu", - "serde", -] - [[package]] name = "serde_with" version = "3.2.0" @@ -4716,18 +4502,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "spin" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" - -[[package]] -name = "spin" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" - [[package]] name = "spki" version = "0.6.0" @@ -4851,17 +4625,6 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" -[[package]] -name = "tar" -version = "0.4.40" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b16afcea1f22891c49a00c751c7b63b2233284064f11a200fc624137c51e2ddb" -dependencies = [ - "filetime", - "libc", - "xattr", -] - [[package]] name = "target-lexicon" version = "0.12.11" @@ -5073,16 +4836,6 @@ dependencies = [ "syn 2.0.32", ] -[[package]] -name = "tokio-rustls" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" -dependencies = [ - "rustls", - "tokio", -] - [[package]] name = "tokio-stream" version = "0.1.15" @@ -5120,7 +4873,6 @@ dependencies = [ "futures-sink", "pin-project-lite", "tokio", - "tracing", ] [[package]] @@ -5367,18 +5119,6 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" -[[package]] -name = "untrusted" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" - -[[package]] -name = "untrusted" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" - [[package]] name = "url" version = "1.7.2" @@ -5751,12 +5491,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "webpki-roots" -version = "0.25.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc" - [[package]] name = "winapi" version = "0.3.9" @@ -5981,16 +5715,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "winreg" -version = "0.50.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" -dependencies = [ - "cfg-if 1.0.0", - "windows-sys 0.48.0", -] - [[package]] name = "wyz" version = "0.5.1" @@ -6000,15 +5724,6 @@ dependencies = [ "tap", ] -[[package]] -name = "xattr" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f4686009f71ff3e5c4dbcf1a282d0a44db3f021ba69350cd42086b3e5f1c6985" -dependencies = [ - "libc", -] - [[package]] name = "zerocopy" version = "0.7.32" diff --git a/noir/noir-repo/Cargo.toml b/noir/noir-repo/Cargo.toml index f744d6d0cf5..b5a5c68d736 100644 --- a/noir/noir-repo/Cargo.toml +++ b/noir/noir-repo/Cargo.toml @@ -13,8 +13,6 @@ members = [ "compiler/fm", "compiler/wasm", # Crates related to tooling built on top of the Noir compiler - "tooling/backend_interface", - "tooling/bb_abstraction_leaks", "tooling/lsp", "tooling/debugger", "tooling/nargo", diff --git a/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs b/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs index 1afe0a30068..921bad4b612 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs @@ -75,7 +75,8 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction) -> String { for i in 0..{0}.len() {{ args_acc = args_acc.append(hash_{0}[i].as_slice()); }}\n", - param_name, typ.typ + param_name, + typ.typ.to_string().replace("plain::", "") ) } _ => { diff --git a/noir/noir-repo/compiler/integration-tests/scripts/codegen-verifiers.sh b/noir/noir-repo/compiler/integration-tests/scripts/codegen-verifiers.sh index e377a3ee3f8..abc26c4c465 100644 --- a/noir/noir-repo/compiler/integration-tests/scripts/codegen-verifiers.sh +++ b/noir/noir-repo/compiler/integration-tests/scripts/codegen-verifiers.sh @@ -1,26 +1,34 @@ #!/usr/bin/env bash +NARGO_BACKEND_PATH=${NARGO_BACKEND_PATH:-bb} + self_path=$(dirname "$(readlink -f "$0")") repo_root=$self_path/../../.. -# Run codegen-verifier for 1_mul +# We want to move all the contracts to the root of compiler/integration-tests +contracts_dir=$self_path/../contracts +rm -rf $contracts_dir +mkdir $contracts_dir + +KEYS=$(mktemp -d) + +# Codegen verifier contract for 1_mul mul_dir=$repo_root/test_programs/execution_success/1_mul -nargo --program-dir $mul_dir codegen-verifier +nargo --program-dir $mul_dir compile +$NARGO_BACKEND_PATH write_vk -b $mul_dir/target/1_mul.json -o $KEYS/1_mul +$NARGO_BACKEND_PATH contract -k $KEYS/1_mul -o $contracts_dir/1_mul.sol -# Run codegen-verifier for assert_statement +# Codegen verifier contract for assert_statement assert_statement_dir=$repo_root/test_programs/execution_success/assert_statement -nargo --program-dir $assert_statement_dir codegen-verifier +nargo --program-dir $assert_statement_dir compile +$NARGO_BACKEND_PATH write_vk -b $assert_statement_dir/target/assert_statement.json -o $KEYS/assert_statement +$NARGO_BACKEND_PATH contract -k $KEYS/assert_statement -o $contracts_dir/assert_statement.sol -# Run codegen-verifier for recursion +# Codegen verifier contract for recursion recursion_dir=$repo_root/compiler/integration-tests/circuits/recursion -nargo --program-dir $recursion_dir codegen-verifier - -# Copy compiled contracts from the root of compiler/integration-tests -contracts_dir=$self_path/../contracts -rm -rf $contracts_dir -mkdir $contracts_dir +nargo --program-dir $recursion_dir compile +$NARGO_BACKEND_PATH write_vk -b $recursion_dir/target/recursion.json -o $KEYS/recursion +$NARGO_BACKEND_PATH contract -k $KEYS/recursion ./ -o $contracts_dir/recursion.sol -cp $mul_dir/contract/1_mul/plonk_vk.sol $contracts_dir/1_mul.sol -cp $assert_statement_dir/contract/assert_statement/plonk_vk.sol $contracts_dir/assert_statement.sol -cp $recursion_dir/contract/recursion/plonk_vk.sol $contracts_dir/recursion.sol +rm -rf $KEYS \ No 
newline at end of file
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs
index 732bd3cbc59..38e9bdfa8b8 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs
@@ -7,7 +7,7 @@ use super::{
 };
 use acvm::{acir::brillig::MemoryAddress, FieldElement};
 
-pub(crate) const MAX_STACK_SIZE: usize = 1024;
+pub(crate) const MAX_STACK_SIZE: usize = 2048;
 
 impl BrilligContext {
     /// Creates an entry point artifact that will jump to the function label provided.
diff --git a/noir/noir-repo/docs/docs/getting_started/barretenberg/_category_.json b/noir/noir-repo/docs/docs/getting_started/barretenberg/_category_.json
new file mode 100644
index 00000000000..27a8e89228d
--- /dev/null
+++ b/noir/noir-repo/docs/docs/getting_started/barretenberg/_category_.json
@@ -0,0 +1,6 @@
+{
+  "position": 1,
+  "label": "Install Barretenberg",
+  "collapsible": true,
+  "collapsed": true
+}
diff --git a/noir/noir-repo/docs/docs/getting_started/barretenberg/index.md b/noir/noir-repo/docs/docs/getting_started/barretenberg/index.md
new file mode 100644
index 00000000000..f435ae151fe
--- /dev/null
+++ b/noir/noir-repo/docs/docs/getting_started/barretenberg/index.md
@@ -0,0 +1,54 @@
+---
+title: Barretenberg Installation
+description:
+  `bb` is a command-line tool for interacting with Aztec's proving backend Barretenberg. This page is a quick guide on how to install `bb`.
+keywords: [
+  Barretenberg,
+  bb,
+  Installation,
+  Terminal Commands,
+  Version Check,
+  Nightlies,
+  Specific Versions,
+  Branches
+]
+pagination_next: getting_started/hello_noir/index
+---
+
+`bb` is the CLI tool for generating and verifying proofs for Noir programs using the Barretenberg proving library. It also allows generating Solidity verifier contracts with which you can verify proofs that were constructed using `bb`. 
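+For example (a sketch mirroring the `codegen-verifiers.sh` changes elsewhere in this PR; the `hello_world` artifact name is an assumption), once `bb` is installed you can generate a Solidity verifier for a compiled program:
+
+```bash
+# Compile, write the verification key, then emit a Solidity verifier contract from it.
+nargo compile
+bb write_vk -b ./target/hello_world.json -o ./target/vk
+bb contract -k ./target/vk -o ./target/Verifier.sol
+```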
+ +## Installing `bb` + +Open a terminal on your machine, and write: + +##### macOS (Apple Silicon) + +```bash +mkdir -p $HOME/.barretenberg && \ +curl -o ./barretenberg-aarch64-apple-darwin.tar.gz -L https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v0.38.0/barretenberg-aarch64-apple-darwin.tar.gz && \ +tar -xvf ./barretenberg-aarch64-apple-darwin.tar.gz -C $HOME/.barretenberg/ && \ +echo 'export PATH=$PATH:$HOME/.barretenberg/' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### macOS (Intel) + +```bash +mkdir -p $HOME/.barretenberg && \ +curl -o ./barretenberg-x86_64-apple-darwin.tar.gz -L https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v0.38.0/barretenberg-x86_64-apple-darwin.tar.gz && \ +tar -xvf ./barretenberg-x86_64-apple-darwin.tar.gz -C $HOME/.barretenberg/ && \ +echo 'export PATH=$PATH:$HOME/.barretenberg/' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### Linux (Bash) + +```bash +mkdir -p $HOME/.barretenberg && \ +curl -o ./barretenberg-x86_64-linux-gnu.tar.gz -L https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v0.38.0/barretenberg-x86_64-linux-gnu.tar.gz && \ +tar -xvf ./barretenberg-x86_64-linux-gnu.tar.gz -C $HOME/.barretenberg/ && \ +echo -e 'export PATH=$PATH:$HOME/.barretenberg/' >> ~/.bashrc && \ +source ~/.bashrc +``` + +Now we're ready to start working on [our first Noir program!](../hello_noir/index.md) diff --git a/noir/noir-repo/docs/docs/getting_started/hello_noir/_category_.json b/noir/noir-repo/docs/docs/getting_started/hello_noir/_category_.json index 23b560f610b..976a2325de0 100644 --- a/noir/noir-repo/docs/docs/getting_started/hello_noir/_category_.json +++ b/noir/noir-repo/docs/docs/getting_started/hello_noir/_category_.json @@ -1,5 +1,5 @@ { - "position": 1, + "position": 2, "collapsible": true, "collapsed": true } diff --git a/noir/noir-repo/docs/docs/getting_started/hello_noir/index.md b/noir/noir-repo/docs/docs/getting_started/hello_noir/index.md index 743c4d8d634..1ade3f09ae3 100644 --- a/noir/noir-repo/docs/docs/getting_started/hello_noir/index.md +++ b/noir/noir-repo/docs/docs/getting_started/hello_noir/index.md @@ -90,13 +90,11 @@ cd hello_world nargo check ``` -Two additional files would be generated in your project directory: +A _Prover.toml_ file will be generated in your project directory, to allow specifying input values to the program. -_Prover.toml_ houses input values, and _Verifier.toml_ houses public values. +## Execute Our Noir Program -## Prove Our Noir Program - -Now that the project is set up, we can create a proof of correct execution of our Noir program. +Now that the project is set up, we can execute our Noir program. Fill in input values for execution in the _Prover.toml_ file. For example: @@ -105,37 +103,42 @@ x = "1" y = "2" ``` -Prove the valid execution of your Noir program: +Execute your Noir program: ```sh -nargo prove +nargo execute witness-name ``` -A new folder _proofs_ would then be generated in your project directory, containing the proof file -`.proof`, where the project name is defined in Nargo.toml. +The witness corresponding to this execution will then be written to the file `./target/witness-name.gz`. 
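+For example (a sketch; `hello_world` is the package name from Nargo.toml):
+
+```sh
+nargo execute witness-name
+ls ./target/witness-name.gz   # the witness generated by this execution
+```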
-The _Verifier.toml_ file would also be updated with the public values computed from program -execution (in this case the value of `y`): +## Prove Our Noir Program -```toml -y = "0x0000000000000000000000000000000000000000000000000000000000000002" +:::info + +Nargo no longer handles communicating with backends in order to generate proofs. In order to prove/verify your Noir programs, you'll need an installation of [bb](../barretenberg/index.md). + +::: + +Prove the valid execution of your Noir program using `bb`: + +```sh +bb prove -b ./target/hello_world.json -w ./target/witness-name.gz -o ./proof ``` -> **Note:** Values in _Verifier.toml_ are computed as 32-byte hex values. +A new file called `proof` will be generated in your project directory, containing the generated proof for your program. ## Verify Our Noir Program -Once a proof is generated, we can verify correct execution of our Noir program by verifying the -proof file. +Once a proof is generated, we can verify correct execution of our Noir program by verifying the proof file. Verify your proof by running: ```sh -nargo verify +bb write_vk -b ./target/hello_world.json -o ./target/vk +bb verify -k ./target/vk -p ./proof ``` -The verification will complete in silence if it is successful. If it fails, it will log the -corresponding error instead. +The verification will complete in silence if it is successful. If it fails, it will log the corresponding error instead. Congratulations, you have now created and verified a proof for your very first Noir program! diff --git a/noir/noir-repo/docs/docs/getting_started/hello_noir/project_breakdown.md b/noir/noir-repo/docs/docs/getting_started/hello_noir/project_breakdown.md index 6160a102c6c..29688df148f 100644 --- a/noir/noir-repo/docs/docs/getting_started/hello_noir/project_breakdown.md +++ b/noir/noir-repo/docs/docs/getting_started/hello_noir/project_breakdown.md @@ -1,10 +1,10 @@ --- title: Project Breakdown description: - Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML - files, and how to prove and verify your program. + Learn about the anatomy of a Nargo project, including the purpose of the Prover TOML + file, and how to prove and verify your program. keywords: - [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] + [Nargo, Nargo project, Prover.toml, proof verification, private asset transfer] sidebar_position: 2 --- @@ -18,7 +18,6 @@ commands, you would get a minimal Nargo project of the following structure: - src - Prover.toml - - Verifier.toml - Nargo.toml The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ @@ -28,10 +27,6 @@ file will be generated within it. _Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags, see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well. -### Verifier.toml - -_Verifier.toml_ contains public in/output values computed when executing the Noir program. - ### Nargo.toml _Nargo.toml_ contains the environmental options of your project. It contains a "package" section and a "dependencies" section. @@ -92,20 +87,15 @@ fn main(x : Field, y : Field) { } ``` -The parameters `x` and `y` can be seen as the API for the program and must be supplied by the -prover. 
Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when -verifying the proof. +The parameters `x` and `y` can be seen as the API for the program and must be supplied by the prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs when verifying the proof. The prover supplies the values for `x` and `y` in the _Prover.toml_ file. -As for the program body, `assert` ensures that the condition to be satisfied (e.g. `x != y`) is -constrained by the proof of the execution of said program (i.e. if the condition was not met, the -verifier would reject the proof as an invalid proof). +As for the program body, `assert` ensures that the condition to be satisfied (e.g. `x != y`) is constrained by the proof of the execution of said program (i.e. if the condition was not met, the verifier would reject the proof as an invalid proof). ### Prover.toml -The _Prover.toml_ file is a file which the prover uses to supply his witness values(both private and -public). +The _Prover.toml_ file is used by the prover to supply the inputs to the Noir program (both private and public). In our hello world program the _Prover.toml_ file looks like this: @@ -114,12 +104,9 @@ x = "1" y = "2" -When the command `nargo prove` is executed, two processes happen: - -1. Noir creates a proof that `x`, which holds the value of `1`, and `y`, which holds the value of `2`, - is not equal. This inequality constraint is due to the line `assert(x != y)`. +When `nargo execute` is run, nargo will execute the Noir program using the inputs specified in `Prover.toml`, aborting if it finds that these do not satisfy the constraints defined by `main`. In this example, `x` and `y` must satisfy the inequality constraint `assert(x != y)`. -2. Noir creates and stores the proof of this statement in the _proofs_ directory in a file called your-project.proof. So if your project is named "private_voting" (defined in the project Nargo.toml), the proof will be saved at `./proofs/private_voting.proof`. Opening this file will display the proof in hex format. +If an output name is specified, as in `nargo execute foo`, the witness generated by this execution will be written to `./target/foo.gz`. This can then be used to generate a proof of the execution. #### Arrays of Structs @@ -155,45 +142,18 @@ baz = 2 #### Custom toml files -You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags. +You can specify a `toml` file with a different name to use for execution by using the `--prover-name` or `-p` flags. -This command looks for proof inputs in the default **Prover.toml** and generates the proof and saves it at `./proofs/.proof`: +This command looks for proof inputs in the default **Prover.toml** and generates the witness and saves it at `./target/foo.gz`: ```bash -nargo prove +nargo execute foo ``` -This command looks for proof inputs in the custom **OtherProver.toml** and generates proof and saves it at `./proofs/.proof`: +This command looks for proof inputs in the custom **OtherProver.toml** and generates the witness and saves it at `./target/bar.gz`: ```bash -nargo prove -p OtherProver +nargo execute -p OtherProver bar ``` -## Verifying a Proof - -When the command `nargo verify` is executed, two processes happen: - -1. Noir checks in the _proofs_ directory for a proof file with the project name (eg. test_project.proof) - -2.
If that file is found, the proof's validity is checked - -> **Note:** The validity of the proof is linked to the current Noir program; if the program is -> changed and the verifier verifies the proof, it will fail because the proof is not valid for the -> _modified_ Noir program. - -In production, the prover and the verifier are usually two separate entities. A prover would -retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the -verifier. The verifier would then retrieve the public inputs, usually from external sources, and -verify the validity of the proof against it. - -Take a private asset transfer as an example: - -A person using a browser as the prover would retrieve private inputs locally (e.g. the user's private key) and -public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof -and submit it to the verifier smart contract. - -The verifier contract would then draw the user's encrypted balance directly from the blockchain and -verify the proof submitted against it. If the verification passes, additional functions in the -verifier contract could trigger (e.g. approve the asset transfer). - Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. diff --git a/noir/noir-repo/docs/docs/getting_started/tooling/noir_codegen.md b/noir/noir-repo/docs/docs/getting_started/tooling/noir_codegen.md index d65151da0ab..1c040585340 100644 --- a/noir/noir-repo/docs/docs/getting_started/tooling/noir_codegen.md +++ b/noir/noir-repo/docs/docs/getting_started/tooling/noir_codegen.md @@ -2,7 +2,7 @@ title: Noir Codegen for TypeScript description: Learn how to use Noir codegen to generate TypeScript bindings keywords: [Nargo, Noir, compile, TypeScript] -sidebar_position: 2 +sidebar_position: 3 --- When using TypeScript, it is extra work to interpret Noir program outputs in a type-safe way. Third party libraries may exist for popular Noir programs, but they are either hard to find or unmaintained. diff --git a/noir/noir-repo/docs/docs/how_to/how-to-oracles.md b/noir/noir-repo/docs/docs/how_to/how-to-oracles.md index 8cf8035a5c4..5f427f1e23f 100644 --- a/noir/noir-repo/docs/docs/how_to/how-to-oracles.md +++ b/noir/noir-repo/docs/docs/how_to/how-to-oracles.md @@ -177,7 +177,7 @@ interface ForeignCallResult { ## Step 3 - Usage with Nargo -Using the [`nargo` CLI tool](../getting_started/installation/index.md), you can use oracles in the `nargo test`, `nargo execute` and `nargo prove` commands by passing a value to `--oracle-resolver`. For example: +Using the [`nargo` CLI tool](../getting_started/installation/index.md), you can use oracles in the `nargo test` and `nargo execute` commands by passing a value to `--oracle-resolver`. For example: ```bash nargo test --oracle-resolver http://localhost:5555 @@ -203,7 +203,7 @@ As one can see, in NoirJS, the [`foreignCallHandler`](../reference/NoirJS/noir_j Does this mean you don't have to write an RPC server like in [Step #2](#step-2---write-an-rpc-server)? -You don't technically have to, but then how would you run `nargo test` or `nargo prove`? To use both `Nargo` and `NoirJS` in your development flow, you will have to write a JSON RPC server. +You don't technically have to, but then how would you run `nargo test`? To use both `Nargo` and `NoirJS` in your development flow, you will have to write a JSON RPC server. 
::: diff --git a/noir/noir-repo/docs/docs/how_to/how-to-solidity-verifier.md b/noir/noir-repo/docs/docs/how_to/how-to-solidity-verifier.md index e3c7c1065da..7c96e22b8d5 100644 --- a/noir/noir-repo/docs/docs/how_to/how-to-solidity-verifier.md +++ b/noir/noir-repo/docs/docs/how_to/how-to-solidity-verifier.md @@ -43,11 +43,19 @@ Generating a Solidity Verifier contract is actually a one-command process. Howev This is by far the most straight-forward step. Just run: ```sh -nargo codegen-verifier +nargo compile ``` -A new `contract` folder would then be generated in your project directory, containing the Solidity -file `plonk_vk.sol`. It can be deployed to any EVM blockchain acting as a verifier smart contract. +This will compile your source code into a Noir build artifact to be stored in the `./target` directory. You can then generate the smart contract using the commands: + +```sh +# Here we pass the path to the newly generated Noir artifact. +bb write_vk -b ./target/<project_name>.json +bb contract +``` + +replacing `<project_name>` with the name of your Noir project. A new `contract` folder would then be generated in your project directory, containing the Solidity +file `contract.sol`. It can be deployed to any EVM blockchain acting as a verifier smart contract. :::info @@ -123,11 +131,25 @@ To verify a proof using the Solidity verifier contract, we call the `verify` fun function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool) ``` -When using the default example in the [Hello Noir](../getting_started/hello_noir/index.md) guide, the easiest way to confirm that the verifier contract is doing its job is by calling the `verify` function via remix with the required parameters. For `_proof`, run `nargo prove` and use the string in `proof/.proof` (adding the hex `0x` prefix). We can also copy the public input from `Verifier.toml`, as it will be properly formatted as 32-byte strings: +When using the default example in the [Hello Noir](../getting_started/hello_noir/index.md) guide, the easiest way to confirm that the verifier contract is doing its job is by calling the `verify` function via Remix with the required parameters. Note that the public inputs must be passed in separately from the rest of the proof, so we must split the proof as returned from `bb`. +First, generate a proof with `bb` at the location `./proof` using the steps in [getting started](../getting_started/hello_noir/index.md). This proof is in a binary format, but we want to convert it into a hex string to pass into Remix. This can be done with the following snippet: +```bash +# This value must be changed to match the number of public inputs (including return values!) in your program. +NUM_PUBLIC_INPUTS=1 +PUBLIC_INPUT_BYTES=$((32 * $NUM_PUBLIC_INPUTS)) +HEX_PUBLIC_INPUTS=$(head -c $PUBLIC_INPUT_BYTES ./proof | od -An -v -t x1 | tr -d $' \n') +HEX_PROOF=$(tail -c +$(($PUBLIC_INPUT_BYTES + 1)) ./proof | od -An -v -t x1 | tr -d $' \n') + +echo "Public inputs:" +echo $HEX_PUBLIC_INPUTS + +echo "Proof:" +echo "0x$HEX_PROOF" ``` - -0x...... , [0x0000.....02] -``` + +Remix expects that the public inputs will be split into an array of `bytes32` values, so `HEX_PUBLIC_INPUTS` needs to be split up into 32-byte chunks, each prefixed with `0x`.
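+
+As an illustration only (a minimal sketch using standard shell tools, reusing the `HEX_PUBLIC_INPUTS` variable from the snippet above), one way to perform that split is:
+
+```bash
+# Break the hex string into 64-hex-character (32-byte) chunks, prefix each with 0x,
+# and join them into a Remix-style array literal.
+CHUNKED=$(echo -n $HEX_PUBLIC_INPUTS | sed 's/.\{64\}/0x&, /g' | sed 's/, $//')
+echo "[$CHUNKED]"
+```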
A programmatic example of how the `verify` function is called can be seen in the example zk voting application [here](https://github.com/noir-lang/noir-examples/blob/33e598c257e2402ea3a6b68dd4c5ad492bce1b0a/foundry-voting/src/zkVote.sol#L35): @@ -144,11 +166,9 @@ function castVote(bytes calldata proof, uint proposalId, uint vote, bytes32 null :::info[Return Values] -A circuit doesn't have the concept of a return value. Return values are just syntactic sugar in -Noir. +A circuit doesn't have the concept of a return value. Return values are just syntactic sugar in Noir. -Under the hood, the return value is passed as an input to the circuit and is checked at the end of -the circuit program. +Under the hood, the return value is passed as an input to the circuit and is checked at the end of the circuit program. For example, if you have Noir program like this: @@ -162,11 +182,11 @@ fn main( ) -> pub Field -the `verify` function will expect the public inputs array (second function parameter) to be of length 3, the two inputs and the return value. Like before, these values are populated in Verifier.toml after running `nargo prove`. +the `verify` function will expect the public inputs array (second function parameter) to be of length 3: the two inputs and the return value. Passing only two inputs will result in an error such as `PUBLIC_INPUT_COUNT_INVALID(3, 2)`. -In this case, the inputs parameter to `verify` would be an array ordered as `[pubkey_x, pubkey_y, return]`. +In this case, the inputs parameter to `verify` would be an array ordered as `[pubkey_x, pubkey_y, return]`. ::: diff --git a/noir/noir-repo/docs/docs/noir/concepts/data_types/booleans.md b/noir/noir-repo/docs/docs/noir/concepts/data_types/booleans.md index 69826fcd724..3dcfa836814 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/data_types/booleans.md +++ b/noir/noir-repo/docs/docs/noir/concepts/data_types/booleans.md @@ -23,9 +23,6 @@ fn main() { } ``` -> **Note:** When returning a boolean value, it will show up as a value of 1 for `true` and 0 for -> `false` in _Verifier.toml_. - The boolean type is most commonly used in conditionals like `if` expressions and `assert` statements. More about conditionals is covered in the [Control Flow](../control_flow) and [Assert Function](../assert) sections.
diff --git a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md index 6b2d3773912..c14fffa7174 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md +++ b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md @@ -115,7 +115,7 @@ y = "1" Would result in: ``` -$ nargo prove +$ nargo execute error: Assertion failed: 'attempt to add with overflow' ┌─ ~/src/main.nr:9:13 │ diff --git a/noir/noir-repo/examples/codegen-verifier/.gitignore b/noir/noir-repo/examples/codegen-verifier/.gitignore new file mode 100644 index 00000000000..c0d62c447d3 --- /dev/null +++ b/noir/noir-repo/examples/codegen-verifier/.gitignore @@ -0,0 +1,4 @@ +out +cache +target +src/contract.sol \ No newline at end of file diff --git a/noir/noir-repo/examples/codegen-verifier/Nargo.toml b/noir/noir-repo/examples/codegen-verifier/Nargo.toml new file mode 100644 index 00000000000..2b367f30dbc --- /dev/null +++ b/noir/noir-repo/examples/codegen-verifier/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "hello_world" +type = "bin" +authors = [""] +compiler_version = ">=0.29.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/examples/codegen-verifier/Prover.toml b/noir/noir-repo/examples/codegen-verifier/Prover.toml new file mode 100644 index 00000000000..2c1854573a4 --- /dev/null +++ b/noir/noir-repo/examples/codegen-verifier/Prover.toml @@ -0,0 +1,2 @@ +x = 1 +y = 2 diff --git a/noir/noir-repo/examples/codegen-verifier/codegen_verifier.sh b/noir/noir-repo/examples/codegen-verifier/codegen_verifier.sh new file mode 100755 index 00000000000..fabd6235a67 --- /dev/null +++ b/noir/noir-repo/examples/codegen-verifier/codegen_verifier.sh @@ -0,0 +1,38 @@ +#!/bin/bash +set -eu + +BACKEND=${BACKEND:-bb} + +nargo compile + +# TODO: backend should automatically generate vk if necessary. +$BACKEND write_vk -b ./target/hello_world.json +$BACKEND contract -o ./src/contract.sol + +# We now generate a proof and check whether the verifier contract will verify it. + +nargo execute witness + +PROOF_PATH=./target/proof +$BACKEND prove -b ./target/hello_world.json -w ./target/witness.gz -o $PROOF_PATH + +NUM_PUBLIC_INPUTS=1 +PUBLIC_INPUT_BYTES=$((32 * $NUM_PUBLIC_INPUTS)) +HEX_PUBLIC_INPUTS=$(head -c $PUBLIC_INPUT_BYTES $PROOF_PATH | od -An -v -t x1 | tr -d $' \n') +HEX_PROOF=$(tail -c +$(($PUBLIC_INPUT_BYTES + 1)) $PROOF_PATH | od -An -v -t x1 | tr -d $' \n') + +# Spin up an anvil node to deploy the contract to +anvil & + +DEPLOY_INFO=$(forge create UltraVerifier \ + --rpc-url "127.0.0.1:8545" \ + --private-key "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" \ + --json) +VERIFIER_ADDRESS=$(echo $DEPLOY_INFO | jq -r '.deployedTo') + +# Call the verifier contract with our proof. 
+# Note that we haven't needed to split up `HEX_PUBLIC_INPUTS` as there's only a single public input +cast call $VERIFIER_ADDRESS "verify(bytes, bytes32[])(bool)" "0x$HEX_PROOF" "[0x$HEX_PUBLIC_INPUTS]" + +# Stop anvil node again +kill %- \ No newline at end of file diff --git a/noir/noir-repo/examples/codegen-verifier/foundry.toml b/noir/noir-repo/examples/codegen-verifier/foundry.toml new file mode 100644 index 00000000000..25b918f9c9a --- /dev/null +++ b/noir/noir-repo/examples/codegen-verifier/foundry.toml @@ -0,0 +1,6 @@ +[profile.default] +src = "src" +out = "out" +libs = ["lib"] + +# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options diff --git a/noir/noir-repo/examples/codegen-verifier/src/main.nr b/noir/noir-repo/examples/codegen-verifier/src/main.nr new file mode 100644 index 00000000000..baef0c3786a --- /dev/null +++ b/noir/noir-repo/examples/codegen-verifier/src/main.nr @@ -0,0 +1,3 @@ +fn main(x: Field, y: pub Field) { + assert(x != y); +} \ No newline at end of file diff --git a/noir/noir-repo/examples/codegen-verifier/test.sh b/noir/noir-repo/examples/codegen-verifier/test.sh new file mode 100755 index 00000000000..93c2f6edf51 --- /dev/null +++ b/noir/noir-repo/examples/codegen-verifier/test.sh @@ -0,0 +1,15 @@ +#!/bin/bash +set -eu + +# This file is used for Noir CI and is not required. + +BACKEND=${BACKEND:-bb} + +rm -f ./src/contract.sol + +./codegen_verifier.sh + +if ! [ -f ./src/contract.sol ]; then + printf '%s\n' "Contract not written to file" >&2 + exit 1 +fi \ No newline at end of file diff --git a/noir/noir-repo/examples/prove_and_verify/Nargo.toml b/noir/noir-repo/examples/prove_and_verify/Nargo.toml new file mode 100644 index 00000000000..2b367f30dbc --- /dev/null +++ b/noir/noir-repo/examples/prove_and_verify/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "hello_world" +type = "bin" +authors = [""] +compiler_version = ">=0.29.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/examples/prove_and_verify/Prover.toml b/noir/noir-repo/examples/prove_and_verify/Prover.toml new file mode 100644 index 00000000000..8c12ebba6cf --- /dev/null +++ b/noir/noir-repo/examples/prove_and_verify/Prover.toml @@ -0,0 +1,2 @@ +x = "1" +y = "2" diff --git a/noir/noir-repo/examples/prove_and_verify/proofs/proof b/noir/noir-repo/examples/prove_and_verify/proofs/proof new file mode 100644 index 00000000000..01d5ad27686 Binary files /dev/null and b/noir/noir-repo/examples/prove_and_verify/proofs/proof differ diff --git a/noir/noir-repo/examples/prove_and_verify/prove_and_verify.sh b/noir/noir-repo/examples/prove_and_verify/prove_and_verify.sh new file mode 100755 index 00000000000..01ee6c70738 --- /dev/null +++ b/noir/noir-repo/examples/prove_and_verify/prove_and_verify.sh @@ -0,0 +1,14 @@ +#!/bin/bash +set -eu + +BACKEND=${BACKEND:-bb} + +nargo execute witness + +# TODO: `bb` should create `proofs` directory if it doesn't exist. +mkdir -p proofs +$BACKEND prove -b ./target/hello_world.json -w ./target/witness.gz + +# TODO: backend should automatically generate vk if necessary. 
+$BACKEND write_vk -b ./target/hello_world.json +$BACKEND verify -k ./target/vk -p ./proofs/proof \ No newline at end of file diff --git a/noir/noir-repo/examples/prove_and_verify/src/main.nr b/noir/noir-repo/examples/prove_and_verify/src/main.nr new file mode 100644 index 00000000000..baef0c3786a --- /dev/null +++ b/noir/noir-repo/examples/prove_and_verify/src/main.nr @@ -0,0 +1,3 @@ +fn main(x: Field, y: pub Field) { + assert(x != y); +} \ No newline at end of file diff --git a/noir/noir-repo/examples/prove_and_verify/test.sh b/noir/noir-repo/examples/prove_and_verify/test.sh new file mode 100755 index 00000000000..a8ae3cca132 --- /dev/null +++ b/noir/noir-repo/examples/prove_and_verify/test.sh @@ -0,0 +1,10 @@ +#!/bin/bash +set -eu + +# This file is used for Noir CI and is not required. + +BACKEND=${BACKEND:-bb} + +rm -rf ./target ./proofs + +./prove_and_verify.sh \ No newline at end of file diff --git a/noir/noir-repo/scripts/install_bb.sh b/noir/noir-repo/scripts/install_bb.sh new file mode 100755 index 00000000000..4ee5bbbbe47 --- /dev/null +++ b/noir/noir-repo/scripts/install_bb.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +# We use this script just for CI, so we assume we're running on x86 linux + +mkdir -p $HOME/.barretenberg +curl -o ./barretenberg-x86_64-linux-gnu.tar.gz -L https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v0.38.0/barretenberg-x86_64-linux-gnu.tar.gz +tar -xvf ./barretenberg-x86_64-linux-gnu.tar.gz -C $HOME/.barretenberg/ +echo 'export PATH=$PATH:$HOME/.barretenberg/' >> ~/.bashrc +source ~/.bashrc diff --git a/noir/noir-repo/tooling/backend_interface/CHANGELOG.md b/noir/noir-repo/tooling/backend_interface/CHANGELOG.md deleted file mode 100644 index 9ebde989add..00000000000 --- a/noir/noir-repo/tooling/backend_interface/CHANGELOG.md +++ /dev/null @@ -1,233 +0,0 @@ -# Changelog - -## [0.11.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.10.1...v0.11.0) (2023-08-18) - - -### ⚠ BREAKING CHANGES - -* Update `acvm` to 0.22.0 ([#240](https://github.com/noir-lang/acvm-backend-barretenberg/issues/240)) - -### Features - -* Update `acvm` to 0.22.0 ([#240](https://github.com/noir-lang/acvm-backend-barretenberg/issues/240)) ([d8342fd](https://github.com/noir-lang/acvm-backend-barretenberg/commit/d8342fd6da605ac3bbd889edf89cd122bc4689ce)) - -## [0.10.1](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.10.0...v0.10.1) (2023-08-18) - - -### Features - -* Migrate to `wasmer` 3.3.0 ([#236](https://github.com/noir-lang/acvm-backend-barretenberg/issues/236)) ([e115e38](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e115e38856887c6b1eeead3534534ac7e6327ea9)) - -## [0.10.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.9.1...v0.10.0) (2023-07-26) - - -### ⚠ BREAKING CHANGES - -* Migrate to ACVM 0.21.0 ([#234](https://github.com/noir-lang/acvm-backend-barretenberg/issues/234)) - -### Features - -* Migrate to ACVM 0.21.0 ([#234](https://github.com/noir-lang/acvm-backend-barretenberg/issues/234)) ([15c8676](https://github.com/noir-lang/acvm-backend-barretenberg/commit/15c86768685d2946a767c350f6ef5972c86677eb)) - -## [0.9.1](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.9.0...v0.9.1) (2023-07-21) - - -### Features - -* add support for atomic memory opcodes ([#232](https://github.com/noir-lang/acvm-backend-barretenberg/issues/232)
([a7aa6e9](https://github.com/noir-lang/acvm-backend-barretenberg/commit/a7aa6e9505bb402c1b3db0a990845ed26928e7aa)) - -## [0.9.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.8.0...v0.9.0) (2023-07-17) - - -### ⚠ BREAKING CHANGES - -* update to ACVM 0.19.0 ([#230](https://github.com/noir-lang/acvm-backend-barretenberg/issues/230)) - -### Miscellaneous Chores - -* update to ACVM 0.19.0 ([#230](https://github.com/noir-lang/acvm-backend-barretenberg/issues/230)) ([3f1d967](https://github.com/noir-lang/acvm-backend-barretenberg/commit/3f1d9674b904acb02c2a3e52481be8a6104c3a9d)) - -## [0.8.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.7.0...v0.8.0) (2023-07-12) - - -### ⚠ BREAKING CHANGES - -* Update to acvm 0.18.1 ([#228](https://github.com/noir-lang/acvm-backend-barretenberg/issues/228)) - -### Features - -* Update to acvm 0.18.1 ([#228](https://github.com/noir-lang/acvm-backend-barretenberg/issues/228)) ([397098b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/397098b239efbe16785b1c9af108ca9fc4e24497)) - -## [0.7.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.6.1...v0.7.0) (2023-07-08) - - -### ⚠ BREAKING CHANGES - -* **bberg:** add secp256r1 builtin to barretenberg ([#223](https://github.com/noir-lang/acvm-backend-barretenberg/issues/223)) - -### Features - -* **bberg:** add secp256r1 builtin to barretenberg ([#223](https://github.com/noir-lang/acvm-backend-barretenberg/issues/223)) ([ceb4770](https://github.com/noir-lang/acvm-backend-barretenberg/commit/ceb47705a492fcdcea1f3c098aaab42ea8edbf2e)) - -## [0.6.1](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.6.0...v0.6.1) (2023-07-06) - - -### Features - -* switch RecursiveAggregation support to true ([#225](https://github.com/noir-lang/acvm-backend-barretenberg/issues/225)) ([e9462ae](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e9462ae015ec0dfb0a23ccbb89562071f87940f5)) - -## [0.6.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.5.1...v0.6.0) (2023-07-06) - - -### ⚠ BREAKING CHANGES - -* Update to ACVM 0.16.0 ([#221](https://github.com/noir-lang/acvm-backend-barretenberg/issues/221)) - -### Features - -* Update to ACVM 0.16.0 ([#221](https://github.com/noir-lang/acvm-backend-barretenberg/issues/221)) ([062d5ed](https://github.com/noir-lang/acvm-backend-barretenberg/commit/062d5ed9b476fab8ac8d3ca13371699fb2aac332)) - -## [0.5.1](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.5.0...v0.5.1) (2023-06-20) - - -### Bug Fixes - -* Remove wasm32 target ([#219](https://github.com/noir-lang/acvm-backend-barretenberg/issues/219)) ([e4cbb6d](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e4cbb6d476e8746de33c38506e2fcb970f1c866a)) - -## [0.5.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.4.0...v0.5.0) (2023-06-15) - - -### ⚠ BREAKING CHANGES - -* Update to target ACVM 0.15.0 ([#217](https://github.com/noir-lang/acvm-backend-barretenberg/issues/217)) - -### Features - -* Update to target ACVM 0.15.0 ([#217](https://github.com/noir-lang/acvm-backend-barretenberg/issues/217)) ([9331898](https://github.com/noir-lang/acvm-backend-barretenberg/commit/9331898f161321c8b6a82d5ea850f197952b2ed2)) - -## 
[0.4.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.3.0...v0.4.0) (2023-06-07) - - -### ⚠ BREAKING CHANGES - -* Recursion ([#207](https://github.com/noir-lang/acvm-backend-barretenberg/issues/207)) - -### Features - -* Recursion ([#207](https://github.com/noir-lang/acvm-backend-barretenberg/issues/207)) ([6fc479b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/6fc479b9ae99d59bbfeb1b895d63cdbea469dcaa)) - -## [0.3.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.2.0...v0.3.0) (2023-06-01) - - -### ⚠ BREAKING CHANGES - -* Update to ACVM 0.13.0 ([#205](https://github.com/noir-lang/acvm-backend-barretenberg/issues/205)) -* added keccakvar constraints ([#213](https://github.com/noir-lang/acvm-backend-barretenberg/issues/213)) -* update pedersen hashes for new implementation ([#212](https://github.com/noir-lang/acvm-backend-barretenberg/issues/212)) - -### Features - -* added keccakvar constraints ([91ea65f](https://github.com/noir-lang/acvm-backend-barretenberg/commit/91ea65f6af7039095c7a3af7bc1e4ce302a68a8d)) -* added keccakvar constraints ([#213](https://github.com/noir-lang/acvm-backend-barretenberg/issues/213)) ([91ea65f](https://github.com/noir-lang/acvm-backend-barretenberg/commit/91ea65f6af7039095c7a3af7bc1e4ce302a68a8d)) -* Update to ACVM 0.13.0 ([#205](https://github.com/noir-lang/acvm-backend-barretenberg/issues/205)) ([298446e](https://github.com/noir-lang/acvm-backend-barretenberg/commit/298446ef8b69f528b6e2fd2abb2298d7b0a8118e)) - - -### Bug Fixes - -* Add or cleanup implementations for JS target ([#199](https://github.com/noir-lang/acvm-backend-barretenberg/issues/199)) ([f6134b7](https://github.com/noir-lang/acvm-backend-barretenberg/commit/f6134b7b502cb74882300b0046ab91ab000daf3c)) -* update pedersen hashes for new impl ([9a233ce](https://github.com/noir-lang/acvm-backend-barretenberg/commit/9a233ce8db9984b29b9cce0603f758d5281c89c9)) -* update pedersen hashes for new implementation ([#212](https://github.com/noir-lang/acvm-backend-barretenberg/issues/212)) ([9a233ce](https://github.com/noir-lang/acvm-backend-barretenberg/commit/9a233ce8db9984b29b9cce0603f758d5281c89c9)) - -## [0.2.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.1.2...v0.2.0) (2023-05-22) - - -### ⚠ BREAKING CHANGES - -* Update to acvm 0.12.0 ([#165](https://github.com/noir-lang/acvm-backend-barretenberg/issues/165)) -* Add serialization logic for RAM and ROM opcodes ([#153](https://github.com/noir-lang/acvm-backend-barretenberg/issues/153)) - -### Features - -* Add serde to `ConstraintSystem` types ([#196](https://github.com/noir-lang/acvm-backend-barretenberg/issues/196)) ([4c04a79](https://github.com/noir-lang/acvm-backend-barretenberg/commit/4c04a79e6d2b0115f3b4526c60f9f7dae8b464ae)) -* Add serialization logic for RAM and ROM opcodes ([#153](https://github.com/noir-lang/acvm-backend-barretenberg/issues/153)) ([3d3847d](https://github.com/noir-lang/acvm-backend-barretenberg/commit/3d3847de70e74a8f65c64e165ad15ae3d31f5350)) -* Update to acvm 0.12.0 ([#165](https://github.com/noir-lang/acvm-backend-barretenberg/issues/165)) ([d613c79](https://github.com/noir-lang/acvm-backend-barretenberg/commit/d613c79584a599f4adbd11d2ce3b61403c185b73)) - -## [0.1.2](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.1.1...v0.1.2) (2023-05-11) 
- - -### Bug Fixes - -* Remove star dependencies to allow publishing ([#182](https://github.com/noir-lang/acvm-backend-barretenberg/issues/182)) ([1727a79](https://github.com/noir-lang/acvm-backend-barretenberg/commit/1727a79ce7e66d95528f70c445cb4ec1b1ece636)) - -## [0.1.1](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.1.0...v0.1.1) (2023-05-11) - - -### Bug Fixes - -* Add description so crate can be published ([#180](https://github.com/noir-lang/acvm-backend-barretenberg/issues/180)) ([caabf94](https://github.com/noir-lang/acvm-backend-barretenberg/commit/caabf9434031c6023a5e3a436c87fba0a1072539)) - -## 0.1.0 (2023-05-10) - - -### ⚠ BREAKING CHANGES - -* Update to ACVM v0.11.0 ([#151](https://github.com/noir-lang/acvm-backend-barretenberg/issues/151)) -* Add Keccak constraints ([#150](https://github.com/noir-lang/acvm-backend-barretenberg/issues/150)) -* migrate to ACVM 0.10.3 ([#148](https://github.com/noir-lang/acvm-backend-barretenberg/issues/148)) -* remove all crates other than `acvm-backend-barretenberg` and remove workspace ([#147](https://github.com/noir-lang/acvm-backend-barretenberg/issues/147)) -* merge `barretenberg_static_lib` and `barretenberg_wasm` ([#117](https://github.com/noir-lang/acvm-backend-barretenberg/issues/117)) -* remove dead blake2 code ([#137](https://github.com/noir-lang/acvm-backend-barretenberg/issues/137)) -* Implement pseudo-builder pattern for ConstraintSystem & hide struct fields ([#120](https://github.com/noir-lang/acvm-backend-barretenberg/issues/120)) -* return boolean rather than `FieldElement` from `verify_signature` ([#123](https://github.com/noir-lang/acvm-backend-barretenberg/issues/123)) -* avoid exposing internals of Assignments type ([#119](https://github.com/noir-lang/acvm-backend-barretenberg/issues/119)) -* update to acvm 0.9.0 ([#106](https://github.com/noir-lang/acvm-backend-barretenberg/issues/106)) -* Depend upon upstream barretenberg & switch to UltraPlonk ([#84](https://github.com/noir-lang/acvm-backend-barretenberg/issues/84)) -* update to ACVM 0.7.0 ([#90](https://github.com/noir-lang/acvm-backend-barretenberg/issues/90)) -* Remove create_proof and verify functions ([#82](https://github.com/noir-lang/acvm-backend-barretenberg/issues/82)) -* update to acvm v0.5.0 ([#60](https://github.com/noir-lang/acvm-backend-barretenberg/issues/60)) - -### Features - -* **acvm_interop:** Updates to reflect new acvm methods using pk/vk ([#50](https://github.com/noir-lang/acvm-backend-barretenberg/issues/50)) ([cff757d](https://github.com/noir-lang/acvm-backend-barretenberg/commit/cff757dca7971161e4bd25e7a744d910c37c22be)) -* Add Keccak constraints ([#150](https://github.com/noir-lang/acvm-backend-barretenberg/issues/150)) ([ce2b9ed](https://github.com/noir-lang/acvm-backend-barretenberg/commit/ce2b9ed456bd8d2ad8357c15736d62c2a5812add)) -* allow overriding transcript location with BARRETENBERG_TRANSCRIPT env var ([#86](https://github.com/noir-lang/acvm-backend-barretenberg/issues/86)) ([af92b99](https://github.com/noir-lang/acvm-backend-barretenberg/commit/af92b99c7b5f37e9659931af378a851b3658a80b)) -* **ci:** add concurrency group for rust workflow ([#63](https://github.com/noir-lang/acvm-backend-barretenberg/issues/63)) ([5c936bc](https://github.com/noir-lang/acvm-backend-barretenberg/commit/5c936bc63cc3adcf9d43c9c4ce69053566089ad9)) -* 
Depend upon upstream barretenberg & switch to UltraPlonk ([#84](https://github.com/noir-lang/acvm-backend-barretenberg/issues/84)) ([8437bf7](https://github.com/noir-lang/acvm-backend-barretenberg/commit/8437bf7e08acadf43b55b307545336596a9fe766)) -* Implement pseudo-builder pattern for ConstraintSystem & hide struct fields ([#120](https://github.com/noir-lang/acvm-backend-barretenberg/issues/120)) ([8ed67d6](https://github.com/noir-lang/acvm-backend-barretenberg/commit/8ed67d68c71d655e1a6a5c38fa9ea1c3566f771d)) -* Leverage rustls when using downloader crate ([#46](https://github.com/noir-lang/acvm-backend-barretenberg/issues/46)) ([9de36b6](https://github.com/noir-lang/acvm-backend-barretenberg/commit/9de36b642d125d1fb4facd1bf60db67946be70ae)) -* merge `barretenberg_static_lib` and `barretenberg_wasm` ([#117](https://github.com/noir-lang/acvm-backend-barretenberg/issues/117)) ([ba1d0d6](https://github.com/noir-lang/acvm-backend-barretenberg/commit/ba1d0d61b94de91b15044d97608907c21bfb5299)) -* migrate to ACVM 0.10.3 ([#148](https://github.com/noir-lang/acvm-backend-barretenberg/issues/148)) ([c9fb9e8](https://github.com/noir-lang/acvm-backend-barretenberg/commit/c9fb9e806f1400a2ff7594a0669bec56025220bb)) -* remove all crates other than `acvm-backend-barretenberg` and remove workspace ([#147](https://github.com/noir-lang/acvm-backend-barretenberg/issues/147)) ([8fe7111](https://github.com/noir-lang/acvm-backend-barretenberg/commit/8fe7111ebdcb043764a83436744662e8c3ca5abc)) -* remove dead blake2 code ([#137](https://github.com/noir-lang/acvm-backend-barretenberg/issues/137)) ([14d8a5b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/14d8a5b893eb1cb91d5bde908643b487b41809d6)) -* replace `downloader` dependency with `reqwest` ([#114](https://github.com/noir-lang/acvm-backend-barretenberg/issues/114)) ([dd62231](https://github.com/noir-lang/acvm-backend-barretenberg/commit/dd62231b8bfcee32e1029d31a07895b16159339c)) -* return boolean from `verify_signature` ([e560602](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e560602ebbd547386ca4cab35735ffa92e98ac4b)) -* return boolean rather than `FieldElement` from `check_membership` ([#124](https://github.com/noir-lang/acvm-backend-barretenberg/issues/124)) ([a0a338e](https://github.com/noir-lang/acvm-backend-barretenberg/commit/a0a338e2295635a07f6b9e497c029160a5f323bc)) -* return boolean rather than `FieldElement` from `verify_signature` ([#123](https://github.com/noir-lang/acvm-backend-barretenberg/issues/123)) ([e560602](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e560602ebbd547386ca4cab35735ffa92e98ac4b)) -* store transcript in `.nargo/backends` directory ([#91](https://github.com/noir-lang/acvm-backend-barretenberg/issues/91)) ([c6b5023](https://github.com/noir-lang/acvm-backend-barretenberg/commit/c6b50231da065e7550bfe8bddf8e46f4cd8002d7)) -* update `aztec_backend_wasm` to use new serialization ([#94](https://github.com/noir-lang/acvm-backend-barretenberg/issues/94)) ([28014d8](https://github.com/noir-lang/acvm-backend-barretenberg/commit/28014d803d052a7f459e03dbd7b5b9210449b1d0)) -* update to acvm 0.9.0 ([#106](https://github.com/noir-lang/acvm-backend-barretenberg/issues/106)) ([ff350fb](https://github.com/noir-lang/acvm-backend-barretenberg/commit/ff350fb111043964b8a14fc0df62508c87506423)) -* Update to ACVM v0.11.0 
([#151](https://github.com/noir-lang/acvm-backend-barretenberg/issues/151)) ([9202415](https://github.com/noir-lang/acvm-backend-barretenberg/commit/92024155532e15f25acb2f3ed8d5ca78da0fddd9)) -* update to acvm v0.5.0 ([#60](https://github.com/noir-lang/acvm-backend-barretenberg/issues/60)) ([74b4d8d](https://github.com/noir-lang/acvm-backend-barretenberg/commit/74b4d8d8b118e4477880c04149e5e9d93d388384)) - - -### Bug Fixes - -* Avoid exposing internals of Assignments type ([614c81b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/614c81b0ea5e110bbf5a61a526bb0173f4fe377a)) -* avoid exposing internals of Assignments type ([#119](https://github.com/noir-lang/acvm-backend-barretenberg/issues/119)) ([614c81b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/614c81b0ea5e110bbf5a61a526bb0173f4fe377a)) -* fix serialization of arithmetic expressions ([#145](https://github.com/noir-lang/acvm-backend-barretenberg/issues/145)) ([7f42535](https://github.com/noir-lang/acvm-backend-barretenberg/commit/7f4253570257d9dedcfa8c8fb96b9d097ef06419)) -* Implement random_get for wasm backend ([#102](https://github.com/noir-lang/acvm-backend-barretenberg/issues/102)) ([9c0f06e](https://github.com/noir-lang/acvm-backend-barretenberg/commit/9c0f06ef56f23e2b5794e810f433e36ff2c5d6b5)) -* rename gates to opcodes ([#59](https://github.com/noir-lang/acvm-backend-barretenberg/issues/59)) ([6e05307](https://github.com/noir-lang/acvm-backend-barretenberg/commit/6e053072d8b9c5d93c296f10782251ccb597f902)) -* reorganize and ensure contracts can be compiled in Remix ([#112](https://github.com/noir-lang/acvm-backend-barretenberg/issues/112)) ([7ec5693](https://github.com/noir-lang/acvm-backend-barretenberg/commit/7ec5693f194a79c379ae2952bc17a31ee63a42b9)) -* replace `serialize_circuit` function with `from<&Circuit>` ([#118](https://github.com/noir-lang/acvm-backend-barretenberg/issues/118)) ([94f83a7](https://github.com/noir-lang/acvm-backend-barretenberg/commit/94f83a78e32d91dfb7ae9824923695d9b4c425b0)) -* Replace serialize_circuit function with `from<&Circuit>` ([94f83a7](https://github.com/noir-lang/acvm-backend-barretenberg/commit/94f83a78e32d91dfb7ae9824923695d9b4c425b0)) -* Update bb-sys to resolve bugs in some environments ([#129](https://github.com/noir-lang/acvm-backend-barretenberg/issues/129)) ([e3d4504](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e3d4504f15e1295e637c4da80b1d08c87c267c45)) -* Update dependency containing pk write fix for large general circuits ([#78](https://github.com/noir-lang/acvm-backend-barretenberg/issues/78)) ([2cb523d](https://github.com/noir-lang/acvm-backend-barretenberg/commit/2cb523d2ab95249157b22e198d9dcd6841c3eed8)) -* Update to bb-sys 0.1.1 and update bb in lockfile ([00bb157](https://github.com/noir-lang/acvm-backend-barretenberg/commit/00bb15779dfb64539eeb3f3bb4c4deeba106f2fe)) -* update to bb-sys 0.1.1 and update bb in lockfile ([#111](https://github.com/noir-lang/acvm-backend-barretenberg/issues/111)) ([00bb157](https://github.com/noir-lang/acvm-backend-barretenberg/commit/00bb15779dfb64539eeb3f3bb4c4deeba106f2fe)) -* use `Barretenberg.call` to query circuit size from wasm ([#121](https://github.com/noir-lang/acvm-backend-barretenberg/issues/121)) 
([a775af1](https://github.com/noir-lang/acvm-backend-barretenberg/commit/a775af14137cc7bc2e9d8a063fa718a5a9abe6cb)) - - -### Miscellaneous Chores - -* Remove create_proof and verify functions ([#82](https://github.com/noir-lang/acvm-backend-barretenberg/issues/82)) ([ad0c422](https://github.com/noir-lang/acvm-backend-barretenberg/commit/ad0c4228488457bd155ff381186ecf583f18bfac)) -* update to ACVM 0.7.0 ([#90](https://github.com/noir-lang/acvm-backend-barretenberg/issues/90)) ([6c03687](https://github.com/noir-lang/acvm-backend-barretenberg/commit/6c036870a6a8e26612ab8b4f90a162f7540b42e2)) diff --git a/noir/noir-repo/tooling/backend_interface/Cargo.toml b/noir/noir-repo/tooling/backend_interface/Cargo.toml deleted file mode 100644 index b731c138c7d..00000000000 --- a/noir/noir-repo/tooling/backend_interface/Cargo.toml +++ /dev/null @@ -1,36 +0,0 @@ -[package] -name = "backend-interface" -description = "The definition of the backend CLI interface which Nargo uses for proving/verifying ACIR circuits." -version.workspace = true -authors.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -acvm.workspace = true -dirs.workspace = true -thiserror.workspace = true -serde.workspace = true -serde_json.workspace = true -bb_abstraction_leaks.workspace = true -tracing.workspace = true - -tempfile.workspace = true - -## bb binary downloading -tar = "~0.4.15" -flate2 = "~1.0.1" -reqwest = { version = "0.11.20", default-features = false, features = [ - "rustls-tls", - "blocking", -] } - -[dev-dependencies] -test-binary = "3.0.1" - -[build-dependencies] -build-target = "0.4.0" -const_format.workspace = true diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/contract.rs b/noir/noir-repo/tooling/backend_interface/src/cli/contract.rs deleted file mode 100644 index 935b96b3ac4..00000000000 --- a/noir/noir-repo/tooling/backend_interface/src/cli/contract.rs +++ /dev/null @@ -1,71 +0,0 @@ -use std::path::{Path, PathBuf}; - -use crate::BackendError; - -use super::string_from_stderr; - -/// VerifyCommand will call the barretenberg binary -/// to return a solidity library with the verification key -/// that can be used to verify proofs on-chain. -/// -/// This does not return a Solidity file that is able -/// to verify a proof. See acvm_interop/contract.sol for the -/// remaining logic that is missing. 
-pub(crate) struct ContractCommand { - pub(crate) crs_path: PathBuf, - pub(crate) vk_path: PathBuf, -} - -impl ContractCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result { - let mut command = std::process::Command::new(binary_path); - - command - .arg("contract") - .arg("-c") - .arg(self.crs_path) - .arg("-k") - .arg(self.vk_path) - .arg("-o") - .arg("-"); - - let output = command.output()?; - - if output.status.success() { - String::from_utf8(output.stdout) - .map_err(|error| BackendError::InvalidUTF8Vector(error.into_bytes())) - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} - -#[test] -fn contract_command() -> Result<(), BackendError> { - use tempfile::tempdir; - - let backend = crate::get_mock_backend()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path(); - let artifact_path = temp_directory_path.join("program.json"); - let vk_path = temp_directory_path.join("vk"); - - let crs_path = backend.backend_directory(); - - std::fs::File::create(&artifact_path).expect("file should be created"); - - let write_vk_command = super::WriteVkCommand { - artifact_path, - vk_path_output: vk_path.clone(), - crs_path: crs_path.clone(), - }; - write_vk_command.run(backend.binary_path())?; - - let contract_command = ContractCommand { vk_path, crs_path }; - contract_command.run(backend.binary_path())?; - - drop(temp_directory); - - Ok(()) -} diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/gates.rs b/noir/noir-repo/tooling/backend_interface/src/cli/gates.rs deleted file mode 100644 index ce6c6cebfd3..00000000000 --- a/noir/noir-repo/tooling/backend_interface/src/cli/gates.rs +++ /dev/null @@ -1,70 +0,0 @@ -use serde::Deserialize; -use std::path::{Path, PathBuf}; - -use crate::BackendError; - -use super::string_from_stderr; - -/// GatesCommand will call the barretenberg binary -/// to return the number of gates needed to create a proof -/// for the given bytecode. -pub(crate) struct GatesCommand { - pub(crate) crs_path: PathBuf, - pub(crate) artifact_path: PathBuf, -} - -#[derive(Deserialize)] -struct GatesResponse { - functions: Vec, -} - -#[derive(Deserialize)] -pub struct CircuitReport { - pub acir_opcodes: u32, - pub circuit_size: u32, -} - -impl GatesCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result, BackendError> { - let output = std::process::Command::new(binary_path) - .arg("gates") - .arg("-c") - .arg(self.crs_path) - .arg("-b") - .arg(self.artifact_path) - .output()?; - - if !output.status.success() { - return Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))); - } - - let gates_info: GatesResponse = - serde_json::from_slice(&output.stdout).expect("Backend should return valid json"); - - Ok(gates_info.functions) - } -} - -#[test] -fn gate_command() -> Result<(), BackendError> { - use tempfile::tempdir; - - let backend = crate::get_mock_backend()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path(); - let artifact_path = temp_directory_path.join("program.json"); - let crs_path = backend.backend_directory(); - - std::fs::File::create(&artifact_path).expect("file should be created"); - - let gate_command = GatesCommand { crs_path, artifact_path }; - - let output = gate_command.run(backend.binary_path())?; - // Mock backend always returns zero gates. 
- assert_eq!(output.len(), 1); - assert_eq!(output[0].acir_opcodes, 123); - assert_eq!(output[0].circuit_size, 125); - - Ok(()) -} diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs b/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs deleted file mode 100644 index 16a9517e129..00000000000 --- a/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs +++ /dev/null @@ -1,41 +0,0 @@ -// Reference: https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/cpp/src/barretenberg/bb/main.cpp - -mod contract; -mod gates; -mod proof_as_fields; -mod prove; -mod verify; -mod version; -mod vk_as_fields; -mod write_vk; - -pub(crate) use contract::ContractCommand; -pub(crate) use gates::GatesCommand; -pub(crate) use proof_as_fields::ProofAsFieldsCommand; -pub(crate) use prove::ProveCommand; -pub(crate) use verify::VerifyCommand; -pub(crate) use version::VersionCommand; -pub(crate) use vk_as_fields::VkAsFieldsCommand; -pub(crate) use write_vk::WriteVkCommand; - -pub(crate) use gates::CircuitReport; - -#[test] -fn no_command_provided_works() -> Result<(), crate::BackendError> { - // This is a simple test to check that the binaries work - - let backend = crate::get_mock_backend()?; - - let output = std::process::Command::new(backend.binary_path()).output()?; - - let stderr = string_from_stderr(&output.stderr); - // Assert help message is printed due to no command being provided. - assert!(stderr.contains("Usage: mock_backend ")); - - Ok(()) -} - -// Converts a stderr byte array to a string (including invalid characters) -fn string_from_stderr(stderr: &[u8]) -> String { - String::from_utf8_lossy(stderr).to_string() -} diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/proof_as_fields.rs b/noir/noir-repo/tooling/backend_interface/src/cli/proof_as_fields.rs deleted file mode 100644 index 7eb1c1ef35c..00000000000 --- a/noir/noir-repo/tooling/backend_interface/src/cli/proof_as_fields.rs +++ /dev/null @@ -1,38 +0,0 @@ -use std::path::{Path, PathBuf}; - -use acvm::FieldElement; - -use crate::BackendError; - -use super::string_from_stderr; - -/// `ProofAsFieldsCommand` will call the barretenberg binary -/// to split a proof into a representation as [`FieldElement`]s. -pub(crate) struct ProofAsFieldsCommand { - pub(crate) proof_path: PathBuf, - pub(crate) vk_path: PathBuf, -} - -impl ProofAsFieldsCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result, BackendError> { - let mut command = std::process::Command::new(binary_path); - - command - .arg("proof_as_fields") - .arg("-p") - .arg(self.proof_path) - .arg("-k") - .arg(self.vk_path) - .arg("-o") - .arg("-"); - - let output = command.output()?; - if output.status.success() { - let string_output = String::from_utf8(output.stdout).unwrap(); - serde_json::from_str(&string_output) - .map_err(|err| BackendError::CommandFailed(err.to_string())) - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/prove.rs b/noir/noir-repo/tooling/backend_interface/src/cli/prove.rs deleted file mode 100644 index 30a27048b48..00000000000 --- a/noir/noir-repo/tooling/backend_interface/src/cli/prove.rs +++ /dev/null @@ -1,66 +0,0 @@ -use std::path::{Path, PathBuf}; - -use crate::BackendError; - -use super::string_from_stderr; - -/// ProveCommand will call the barretenberg binary -/// to create a proof, given the witness and the bytecode. 
-/// -/// Note:Internally barretenberg will create and discard the -/// proving key, so this is not returned. -/// -/// The proof will be written to the specified output file. -pub(crate) struct ProveCommand { - pub(crate) crs_path: PathBuf, - pub(crate) artifact_path: PathBuf, - pub(crate) witness_path: PathBuf, -} - -impl ProveCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result, BackendError> { - let mut command = std::process::Command::new(binary_path); - - command - .arg("prove") - .arg("-c") - .arg(self.crs_path) - .arg("-b") - .arg(self.artifact_path) - .arg("-w") - .arg(self.witness_path) - .arg("-o") - .arg("-"); - - let output = command.output()?; - if output.status.success() { - Ok(output.stdout) - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} - -#[test] -fn prove_command() -> Result<(), BackendError> { - use tempfile::tempdir; - - let backend = crate::get_mock_backend()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path(); - let artifact_path = temp_directory_path.join("acir.gz"); - let witness_path = temp_directory_path.join("witness.tr"); - - std::fs::File::create(&artifact_path).expect("file should be created"); - std::fs::File::create(&witness_path).expect("file should be created"); - - let crs_path = backend.backend_directory(); - let prove_command = ProveCommand { crs_path, artifact_path, witness_path }; - - let proof = prove_command.run(backend.binary_path())?; - assert_eq!(proof, "proof".as_bytes()); - drop(temp_directory); - - Ok(()) -} diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/verify.rs b/noir/noir-repo/tooling/backend_interface/src/cli/verify.rs deleted file mode 100644 index beea4bbec7d..00000000000 --- a/noir/noir-repo/tooling/backend_interface/src/cli/verify.rs +++ /dev/null @@ -1,74 +0,0 @@ -use std::path::{Path, PathBuf}; - -use crate::BackendError; - -/// VerifyCommand will call the barretenberg binary -/// to verify a proof -pub(crate) struct VerifyCommand { - pub(crate) crs_path: PathBuf, - pub(crate) proof_path: PathBuf, - pub(crate) vk_path: PathBuf, -} - -impl VerifyCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result { - let mut command = std::process::Command::new(binary_path); - - command - .arg("verify") - .arg("-c") - .arg(self.crs_path) - .arg("-p") - .arg(self.proof_path) - .arg("-k") - .arg(self.vk_path); - - let output = command.output()?; - - // We currently do not distinguish between an invalid proof and an error inside the backend. 
- Ok(output.status.success()) - } -} - -#[test] -fn verify_command() -> Result<(), BackendError> { - use tempfile::tempdir; - - use super::{ProveCommand, WriteVkCommand}; - use crate::proof_system::write_to_file; - - let backend = crate::get_mock_backend()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path(); - let artifact_path = temp_directory_path.join("acir.json"); - let witness_path = temp_directory_path.join("witness.tr"); - let proof_path = temp_directory_path.join("1_mul.proof"); - let vk_path_output = temp_directory_path.join("vk"); - - let crs_path = backend.backend_directory(); - - std::fs::File::create(&artifact_path).expect("file should be created"); - std::fs::File::create(&witness_path).expect("file should be created"); - - let write_vk_command = WriteVkCommand { - artifact_path: artifact_path.clone(), - crs_path: crs_path.clone(), - vk_path_output: vk_path_output.clone(), - }; - - write_vk_command.run(backend.binary_path())?; - - let prove_command = ProveCommand { crs_path: crs_path.clone(), artifact_path, witness_path }; - let proof = prove_command.run(backend.binary_path())?; - - write_to_file(&proof, &proof_path); - - let verify_command = VerifyCommand { crs_path, proof_path, vk_path: vk_path_output }; - - let verified = verify_command.run(backend.binary_path())?; - assert!(verified); - - drop(temp_directory); - Ok(()) -} diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/version.rs b/noir/noir-repo/tooling/backend_interface/src/cli/version.rs deleted file mode 100644 index 83ab72a870e..00000000000 --- a/noir/noir-repo/tooling/backend_interface/src/cli/version.rs +++ /dev/null @@ -1,29 +0,0 @@ -use std::path::Path; - -use crate::BackendError; - -use super::string_from_stderr; - -/// VersionCommand will call the backend binary -/// to query installed version. -pub(crate) struct VersionCommand; - -impl VersionCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result { - let mut command = std::process::Command::new(binary_path); - - command.arg("--version"); - - let output = command.output()?; - if output.status.success() { - match String::from_utf8(output.stdout) { - Ok(result) => Ok(result), - Err(_) => Err(BackendError::CommandFailed( - "Unexpected output from --version check.".to_owned(), - )), - } - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/vk_as_fields.rs b/noir/noir-repo/tooling/backend_interface/src/cli/vk_as_fields.rs deleted file mode 100644 index 1b0212241c4..00000000000 --- a/noir/noir-repo/tooling/backend_interface/src/cli/vk_as_fields.rs +++ /dev/null @@ -1,39 +0,0 @@ -use std::path::{Path, PathBuf}; - -use acvm::FieldElement; - -use crate::BackendError; - -use super::string_from_stderr; - -/// VkAsFieldsCommand will call the barretenberg binary -/// to split a verification key into a representation as [`FieldElement`]s. -/// -/// The hash of the verification key will also be returned. 
-pub(crate) struct VkAsFieldsCommand {
-    pub(crate) vk_path: PathBuf,
-}
-
-impl VkAsFieldsCommand {
-    pub(crate) fn run(
-        self,
-        binary_path: &Path,
-    ) -> Result<(FieldElement, Vec<FieldElement>), BackendError> {
-        let mut command = std::process::Command::new(binary_path);
-
-        command.arg("vk_as_fields").arg("-k").arg(self.vk_path).arg("-o").arg("-");
-
-        let output = command.output()?;
-        if output.status.success() {
-            let string_output = String::from_utf8(output.stdout).unwrap();
-            let mut fields: Vec<FieldElement> = serde_json::from_str(&string_output)
-                .map_err(|err| BackendError::CommandFailed(err.to_string()))?;
-
-            // The first element of this vector is the hash of the verification key, we want to split that off.
-            let hash = fields.remove(0);
-            Ok((hash, fields))
-        } else {
-            Err(BackendError::CommandFailed(string_from_stderr(&output.stderr)))
-        }
-    }
-}
diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/write_vk.rs b/noir/noir-repo/tooling/backend_interface/src/cli/write_vk.rs
deleted file mode 100644
index 3d51b5a4a8c..00000000000
--- a/noir/noir-repo/tooling/backend_interface/src/cli/write_vk.rs
+++ /dev/null
@@ -1,58 +0,0 @@
-use std::path::{Path, PathBuf};
-
-use super::string_from_stderr;
-use crate::BackendError;
-
-/// WriteVkCommand will call the barretenberg binary
-/// to write a verification key to a file
-pub(crate) struct WriteVkCommand {
-    pub(crate) crs_path: PathBuf,
-    pub(crate) artifact_path: PathBuf,
-    pub(crate) vk_path_output: PathBuf,
-}
-
-impl WriteVkCommand {
-    #[tracing::instrument(level = "trace", name = "vk_generation", skip_all)]
-    pub(crate) fn run(self, binary_path: &Path) -> Result<(), BackendError> {
-        let mut command = std::process::Command::new(binary_path);
-
-        command
-            .arg("write_vk")
-            .arg("-c")
-            .arg(self.crs_path)
-            .arg("-b")
-            .arg(self.artifact_path)
-            .arg("-o")
-            .arg(self.vk_path_output);
-
-        let output = command.output()?;
-        if output.status.success() {
-            Ok(())
-        } else {
-            Err(BackendError::CommandFailed(string_from_stderr(&output.stderr)))
-        }
-    }
-}
-
-#[test]
-fn write_vk_command() -> Result<(), BackendError> {
-    use tempfile::tempdir;
-
-    let backend = crate::get_mock_backend()?;
-
-    let temp_directory = tempdir().expect("could not create a temporary directory");
-    let temp_directory_path = temp_directory.path();
-    let artifact_path = temp_directory_path.join("program.json");
-    let vk_path_output = temp_directory.path().join("vk");
-
-    let crs_path = backend.backend_directory();
-
-    std::fs::File::create(&artifact_path).expect("file should be created");
-
-    let write_vk_command = WriteVkCommand { artifact_path, crs_path, vk_path_output };
-
-    write_vk_command.run(backend.binary_path())?;
-    drop(temp_directory);
-
-    Ok(())
-}
diff --git a/noir/noir-repo/tooling/backend_interface/src/download.rs b/noir/noir-repo/tooling/backend_interface/src/download.rs
deleted file mode 100644
index 60ecb14e642..00000000000
--- a/noir/noir-repo/tooling/backend_interface/src/download.rs
+++ /dev/null
@@ -1,58 +0,0 @@
-use std::{
-    io::{Cursor, ErrorKind},
-    path::Path,
-};
-
-/// Downloads a gzipped tar archive and unpacks the backend binary to `destination_path`.
-///
-/// # Backend Requirements
-///
-/// In order for a backend to be compatible with this function:
-/// - `backend_url` must serve a gzipped tarball.
-/// - The tarball must only contain the backend's binary.
-/// - The binary file must be located at the archive root.
-pub fn download_backend(backend_url: &str, destination_path: &Path) -> std::io::Result<()> {
-    use flate2::read::GzDecoder;
-    use tar::Archive;
-    use tempfile::tempdir;
-
-    // Download sources
-    let compressed_file: Cursor<Vec<u8>> = download_binary_from_url(backend_url).map_err(|_| {
-        std::io::Error::new(
-            ErrorKind::Other,
-            format!("Could not download backend from install url: {backend_url}"),
-        )
-    })?;
-
-    // Unpack the tarball
-    let gz_decoder = GzDecoder::new(compressed_file);
-    let mut archive = Archive::new(gz_decoder);
-
-    let temp_directory = tempdir()?;
-    archive.unpack(&temp_directory)?;
-
-    // Assume that the archive contains a single file which is the backend binary.
-    let mut archive_files = std::fs::read_dir(&temp_directory)?;
-    let temp_binary_path = archive_files.next().unwrap()?.path();
-
-    // Create directory to place binary in.
-    std::fs::create_dir_all(destination_path.parent().unwrap())?;
-
-    // Rename the binary to the desired name
-    std::fs::copy(temp_binary_path, destination_path)?;
-
-    drop(temp_directory);
-
-    Ok(())
-}
-
-/// Try to download the specified URL into a buffer which is returned.
-fn download_binary_from_url(url: &str) -> Result<Cursor<Vec<u8>>, reqwest::Error> {
-    let response = reqwest::blocking::get(url)?;
-
-    let bytes = response.bytes()?;
-
-    // TODO: Check SHA of downloaded binary
-
-    Ok(Cursor::new(bytes.to_vec()))
-}
diff --git a/noir/noir-repo/tooling/backend_interface/src/lib.rs b/noir/noir-repo/tooling/backend_interface/src/lib.rs
deleted file mode 100644
index eab98852555..00000000000
--- a/noir/noir-repo/tooling/backend_interface/src/lib.rs
+++ /dev/null
@@ -1,150 +0,0 @@
-#![warn(unused_crate_dependencies, unused_extern_crates)]
-#![warn(unreachable_pub)]
-
-use std::path::PathBuf;
-
-mod cli;
-mod download;
-mod proof_system;
-mod smart_contract;
-
-pub use bb_abstraction_leaks::ACVM_BACKEND_BARRETENBERG;
-use bb_abstraction_leaks::BB_VERSION;
-use cli::VersionCommand;
-pub use download::download_backend;
-use tracing::warn;
-
-const BACKENDS_DIR: &str = ".nargo/backends";
-
-pub fn backends_directory() -> PathBuf {
-    let home_directory = dirs::home_dir().unwrap();
-    home_directory.join(BACKENDS_DIR)
-}
-
-#[cfg(test)]
-test_binary::build_test_binary_once!(mock_backend, "test-binaries");
-
-#[cfg(test)]
-fn get_mock_backend() -> Result<Backend, BackendError> {
-    std::env::set_var("NARGO_BACKEND_PATH", path_to_mock_backend());
-
-    let mock_backend = Backend::new("mock_backend".to_string());
-    mock_backend.assert_binary_exists()?;
-
-    Ok(mock_backend)
-}
-
-#[derive(Debug, thiserror::Error)]
-pub enum BackendError {
-    #[error(transparent)]
-    IoError(#[from] std::io::Error),
-
-    #[error("Backend binary does not exist")]
-    MissingBinary,
-
-    #[error("The backend responded with a malformed UTF8 byte vector: {0:?}")]
-    InvalidUTF8Vector(Vec<u8>),
-
-    #[error(
-        "The backend responded with an unexpected number of bytes. Expected: {0} but got {} ({1:?})", .1.len()
-    )]
-    UnexpectedNumberOfBytes(usize, Vec<u8>),
-
-    #[error("The backend encountered an error: {0:?}")]
-    CommandFailed(String),
-}
-
-#[derive(Debug)]
-pub struct Backend {
-    name: String,
-    binary_path: PathBuf,
-}
-
-impl Backend {
-    pub fn new(name: String) -> Backend {
-        let binary_path = if let Some(binary_path) = std::env::var_os("NARGO_BACKEND_PATH") {
-            PathBuf::from(binary_path)
-        } else {
-            const BINARY_NAME: &str = "backend_binary";
-
-            backends_directory().join(&name).join(BINARY_NAME)
-        };
-        Backend { name, binary_path }
-    }
-
-    pub fn name(&self) -> &str {
-        &self.name
-    }
-
-    fn binary_path(&self) -> &PathBuf {
-        &self.binary_path
-    }
-
-    fn assert_binary_exists(&self) -> Result<&PathBuf, BackendError> {
-        let binary_path = self.binary_path();
-        if binary_path.is_file() {
-            Ok(binary_path)
-        } else {
-            if self.name == ACVM_BACKEND_BARRETENBERG {
-                // If we're trying to use barretenberg, automatically go and install it.
-                let bb_url = std::env::var("BB_BINARY_URL")
-                    .unwrap_or_else(|_| bb_abstraction_leaks::BB_DOWNLOAD_URL.to_owned());
-                download_backend(&bb_url, binary_path)?;
-                return Ok(binary_path);
-            }
-            Err(BackendError::MissingBinary)
-        }
-    }
-
-    fn backend_directory(&self) -> PathBuf {
-        self.binary_path()
-            .parent()
-            .expect("backend binary should have a parent directory")
-            .to_path_buf()
-    }
-
-    fn crs_directory(&self) -> PathBuf {
-        self.backend_directory().join("crs")
-    }
-
-    fn assert_correct_version(&self) -> Result<&PathBuf, BackendError> {
-        let binary_path = self.binary_path();
-        if binary_path.to_string_lossy().contains(ACVM_BACKEND_BARRETENBERG) {
-            match VersionCommand.run(binary_path) {
-                // If version matches then do nothing.
-                Ok(version_string) if version_string == BB_VERSION => (),
-
-                // If version doesn't match then download the correct version.
-                Ok(version_string) => {
-                    warn!("`{ACVM_BACKEND_BARRETENBERG}` version `{version_string}` is different from expected `{BB_VERSION}`. Downloading expected version...");
-                    let bb_url = std::env::var("BB_BINARY_URL")
-                        .unwrap_or_else(|_| bb_abstraction_leaks::BB_DOWNLOAD_URL.to_owned());
-                    download_backend(&bb_url, binary_path)?;
-                }
-
-                // If `bb` fails to report its version, then attempt to fix it by re-downloading the binary.
-                Err(_) => {
-                    warn!("Could not determine version of `{ACVM_BACKEND_BARRETENBERG}`. Downloading expected version...");
-                    let bb_url = std::env::var("BB_BINARY_URL")
-                        .unwrap_or_else(|_| bb_abstraction_leaks::BB_DOWNLOAD_URL.to_owned());
-                    download_backend(&bb_url, binary_path)?;
-                }
-            }
-        }
-        Ok(binary_path)
-    }
-}
-
-#[cfg(test)]
-mod backend {
-    use crate::{Backend, BackendError};
-
-    #[test]
-    fn raises_error_on_missing_binary() {
-        let bad_backend = Backend::new("i_don't_exist".to_string());
-
-        let binary_path = bad_backend.assert_binary_exists();
-
-        assert!(matches!(binary_path, Err(BackendError::MissingBinary)));
-    }
-}
diff --git a/noir/noir-repo/tooling/backend_interface/src/proof_system.rs b/noir/noir-repo/tooling/backend_interface/src/proof_system.rs
deleted file mode 100644
index 49fd57c968f..00000000000
--- a/noir/noir-repo/tooling/backend_interface/src/proof_system.rs
+++ /dev/null
@@ -1,144 +0,0 @@
-use std::io::Write;
-use std::path::Path;
-use std::{fs::File, path::PathBuf};
-
-use acvm::acir::native_types::{WitnessMap, WitnessStack};
-use acvm::FieldElement;
-use tempfile::tempdir;
-use tracing::warn;
-
-use crate::cli::{
-    CircuitReport, GatesCommand, ProofAsFieldsCommand, ProveCommand, VerifyCommand,
-    VkAsFieldsCommand, WriteVkCommand,
-};
-use crate::{Backend, BackendError};
-
-impl Backend {
-    pub fn get_exact_circuit_sizes(
-        &self,
-        artifact_path: PathBuf,
-    ) -> Result<Vec<CircuitReport>, BackendError> {
-        let binary_path = self.assert_binary_exists()?;
-        self.assert_correct_version()?;
-
-        GatesCommand { crs_path: self.crs_directory(), artifact_path }.run(binary_path)
-    }
-
-    #[tracing::instrument(level = "trace", skip_all)]
-    pub fn prove(
-        &self,
-        artifact_path: PathBuf,
-        witness_stack: WitnessStack,
-        num_public_inputs: u32,
-    ) -> Result<Vec<u8>, BackendError> {
-        let binary_path = self.assert_binary_exists()?;
-        self.assert_correct_version()?;
-
-        let temp_directory = tempdir().expect("could not create a temporary directory");
-        let temp_directory = temp_directory.path().to_path_buf();
-
-        // Create a temporary file for the witness
-        let serialized_witnesses: Vec<u8> =
-            witness_stack.try_into().expect("could not serialize witness map");
-        let witness_path = temp_directory.join("witness").with_extension("tr");
-        write_to_file(&serialized_witnesses, &witness_path);
-
-        // Create proof and store it in the specified path
-        let proof_with_public_inputs =
-            ProveCommand { crs_path: self.crs_directory(), artifact_path, witness_path }
-                .run(binary_path)?;
-
-        let proof = bb_abstraction_leaks::remove_public_inputs(
-            // TODO(https://github.com/noir-lang/noir/issues/4428)
-            num_public_inputs as usize,
-            &proof_with_public_inputs,
-        );
-        Ok(proof)
-    }
-
-    #[tracing::instrument(level = "trace", skip_all)]
-    pub fn verify(
-        &self,
-        proof: &[u8],
-        public_inputs: WitnessMap,
-        artifact_path: PathBuf,
-    ) -> Result<bool, BackendError> {
-        let binary_path = self.assert_binary_exists()?;
-        self.assert_correct_version()?;
-
-        let temp_directory = tempdir().expect("could not create a temporary directory");
-        let temp_directory = temp_directory.path().to_path_buf();
-
-        // Create a temporary file for the proof
-        let proof_with_public_inputs =
-            bb_abstraction_leaks::prepend_public_inputs(proof.to_vec(), public_inputs);
-        let proof_path = temp_directory.join("proof").with_extension("proof");
-        write_to_file(&proof_with_public_inputs, &proof_path);
-
-        // Create the verification key and write it to the specified path
-        let vk_path = temp_directory.join("vk");
-
-        WriteVkCommand {
-            crs_path: self.crs_directory(),
-            artifact_path,
-            vk_path_output: vk_path.clone(),
-        }
-        .run(binary_path)?;
-
-        // Verify the proof
-        VerifyCommand { crs_path: self.crs_directory(), proof_path, vk_path }.run(binary_path)
-    }
-
-    pub fn get_intermediate_proof_artifacts(
-        &self,
-        artifact_path: PathBuf,
-        proof: &[u8],
-        public_inputs: WitnessMap,
-    ) -> Result<(Vec<FieldElement>, FieldElement, Vec<FieldElement>), BackendError> {
-        let binary_path = self.assert_binary_exists()?;
-        self.assert_correct_version()?;
-
-        let temp_directory = tempdir().expect("could not create a temporary directory");
-        let temp_directory = temp_directory.path().to_path_buf();
-
-        // Create the verification key and write it to the specified path
-        let vk_path = temp_directory.join("vk");
-
-        WriteVkCommand {
-            crs_path: self.crs_directory(),
-            artifact_path,
-            vk_path_output: vk_path.clone(),
-        }
-        .run(binary_path)?;
-
-        // Create a temporary file for the proof
-
-        let proof_with_public_inputs =
-            bb_abstraction_leaks::prepend_public_inputs(proof.to_vec(), public_inputs);
-        let proof_path = temp_directory.join("proof").with_extension("proof");
-        write_to_file(&proof_with_public_inputs, &proof_path);
-
-        // Now ready to generate intermediate artifacts.
-
-        let proof_as_fields =
-            ProofAsFieldsCommand { proof_path, vk_path: vk_path.clone() }.run(binary_path)?;
-
-        let (vk_hash, vk_as_fields) = VkAsFieldsCommand { vk_path }.run(binary_path)?;
-
-        Ok((proof_as_fields, vk_hash, vk_as_fields))
-    }
-}
-
-pub(super) fn write_to_file(bytes: &[u8], path: &Path) -> String {
-    let display = path.display();
-
-    let mut file = match File::create(path) {
-        Err(why) => panic!("couldn't create {display}: {why}"),
-        Ok(file) => file,
-    };
-
-    match file.write_all(bytes) {
-        Err(why) => panic!("couldn't write to {display}: {why}"),
-        Ok(_) => display.to_string(),
-    }
-}
diff --git a/noir/noir-repo/tooling/backend_interface/src/smart_contract.rs b/noir/noir-repo/tooling/backend_interface/src/smart_contract.rs
deleted file mode 100644
index 8b26ea07a2f..00000000000
--- a/noir/noir-repo/tooling/backend_interface/src/smart_contract.rs
+++ /dev/null
@@ -1,55 +0,0 @@
-use std::path::PathBuf;
-
-use crate::{
-    cli::{ContractCommand, WriteVkCommand},
-    Backend, BackendError,
-};
-use tempfile::tempdir;
-
-impl Backend {
-    pub fn eth_contract(&self, artifact_path: PathBuf) -> Result<String, BackendError> {
-        let binary_path = self.assert_binary_exists()?;
-        self.assert_correct_version()?;
-
-        let temp_directory = tempdir().expect("could not create a temporary directory");
-        let temp_directory_path = temp_directory.path().to_path_buf();
-
-        // Create the verification key and write it to the specified path
-        let vk_path = temp_directory_path.join("vk");
-
-        WriteVkCommand {
-            crs_path: self.crs_directory(),
-            artifact_path,
-            vk_path_output: vk_path.clone(),
-        }
-        .run(binary_path)?;
-
-        ContractCommand { crs_path: self.crs_directory(), vk_path }.run(binary_path)
-    }
-}
-
-#[cfg(test)]
-mod tests {
-
-    use serde_json::json;
-    use tempfile::tempdir;
-
-    use crate::{get_mock_backend, proof_system::write_to_file, BackendError};
-
-    #[test]
-    fn test_smart_contract() -> Result<(), BackendError> {
-        let dummy_artifact = json!({"bytecode": ""});
-        let artifact_bytes = serde_json::to_vec(&dummy_artifact).unwrap();
-
-        let temp_directory = tempdir().expect("could not create a temporary directory");
-        let temp_directory_path = temp_directory.path();
-        let artifact_path = temp_directory_path.join("program.json");
-        write_to_file(&artifact_bytes, &artifact_path);
-
-        let contract = get_mock_backend()?.eth_contract(artifact_path)?;
-
-        assert!(contract.contains("contract VerifierContract"));
-
-        Ok(())
-    }
-}
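Taken together, the deleted `proof_system.rs` methods always drive the backend binary in the same order: derive a verification key from the compiled artifact, produce a proof, then check the proof against that key. A minimal sketch of that round trip using the command wrappers defined earlier in this crate (the input paths are illustrative placeholders; this mirrors the `verify_command` test above):

    use std::path::PathBuf;

    use crate::cli::{ProveCommand, VerifyCommand, WriteVkCommand};
    use crate::proof_system::write_to_file;
    use crate::{Backend, BackendError};

    // Sketch only: assumes it lives inside backend_interface, where these
    // crate-private types are visible.
    fn prove_then_verify(backend: &Backend) -> Result<bool, BackendError> {
        let binary_path = backend.binary_path();
        let crs_path = backend.backend_directory();
        let artifact_path = PathBuf::from("program.json"); // placeholder inputs
        let witness_path = PathBuf::from("witness.tr");
        let vk_path = PathBuf::from("vk");
        let proof_path = PathBuf::from("program.proof");

        // 1. Write the verification key for the compiled circuit.
        WriteVkCommand {
            crs_path: crs_path.clone(),
            artifact_path: artifact_path.clone(),
            vk_path_output: vk_path.clone(),
        }
        .run(binary_path)?;

        // 2. Prove, capturing the proof bytes from stdout, and persist them.
        let proof = ProveCommand { crs_path: crs_path.clone(), artifact_path, witness_path }
            .run(binary_path)?;
        write_to_file(&proof, &proof_path);

        // 3. Verify the stored proof against the key.
        VerifyCommand { crs_path, proof_path, vk_path }.run(binary_path)
    }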
diff --git a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock deleted file mode 100644 index 3c14a936907..00000000000 --- a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock +++ /dev/null @@ -1,223 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 3 - -[[package]] -name = "anstream" -version = "0.6.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e2e1ebcb11de5c03c67de28a7df593d32191b44939c482e97702baaaa6ab6a5" -dependencies = [ - "anstyle", - "anstyle-parse", - "anstyle-query", - "anstyle-wincon", - "colorchoice", - "utf8parse", -] - -[[package]] -name = "anstyle" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" - -[[package]] -name = "anstyle-parse" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" -dependencies = [ - "utf8parse", -] - -[[package]] -name = "anstyle-query" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" -dependencies = [ - "windows-sys", -] - -[[package]] -name = "anstyle-wincon" -version = "3.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" -dependencies = [ - "anstyle", - "windows-sys", -] - -[[package]] -name = "clap" -version = "4.4.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e578d6ec4194633722ccf9544794b71b1385c3c027efe0c55db226fc880865c" -dependencies = [ - "clap_builder", - "clap_derive", -] - -[[package]] -name = "clap_builder" -version = "4.4.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4df4df40ec50c46000231c914968278b1eb05098cf8f1b3a518a95030e71d1c7" -dependencies = [ - "anstream", - "anstyle", - "clap_lex", - "strsim", -] - -[[package]] -name = "clap_derive" -version = "4.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "clap_lex" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" - -[[package]] -name = "colorchoice" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" - -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" - -[[package]] -name = "mock_backend" -version = "0.1.0" -dependencies = [ - "clap", -] - -[[package]] -name = "proc-macro2" -version = "1.0.76" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "quote" -version = 
"1.0.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - -[[package]] -name = "syn" -version = "2.0.48" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "unicode-ident" -version = "1.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" - -[[package]] -name = "utf8parse" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" - -[[package]] -name = "windows-sys" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" -dependencies = [ - "windows-targets", -] - -[[package]] -name = "windows-targets" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" -dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" diff --git a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/Cargo.toml b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/Cargo.toml deleted file mode 100644 index f527b03a7b9..00000000000 --- 
a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/Cargo.toml
+++ /dev/null
@@ -1,11 +0,0 @@
-[workspace]
-
-[package]
-name = "mock_backend"
-version = "0.1.0"
-edition = "2021"
-
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
-[dependencies]
-clap = { version = "4.3.19", features = ["derive"] }
diff --git a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/contract_cmd.rs b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/contract_cmd.rs
deleted file mode 100644
index 7ee41121d61..00000000000
--- a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/contract_cmd.rs
+++ /dev/null
@@ -1,21 +0,0 @@
-use clap::Args;
-use std::io::Write;
-use std::path::PathBuf;
-
-#[derive(Debug, Clone, Args)]
-pub(crate) struct ContractCommand {
-    #[clap(short = 'c')]
-    pub(crate) crs_path: Option<PathBuf>,
-
-    #[clap(short = 'k')]
-    pub(crate) vk_path: PathBuf,
-
-    #[clap(short = 'o')]
-    pub(crate) contract_path: PathBuf,
-}
-
-pub(crate) fn run(args: ContractCommand) {
-    assert!(args.vk_path.is_file(), "Could not find vk file at provided path");
-
-    std::io::stdout().write_all(b"contract VerifierContract {}").unwrap();
-}
diff --git a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs
deleted file mode 100644
index 0cebfbca42d..00000000000
--- a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs
+++ /dev/null
@@ -1,20 +0,0 @@
-use clap::Args;
-use std::io::Write;
-use std::path::PathBuf;
-
-#[derive(Debug, Clone, Args)]
-pub(crate) struct GatesCommand {
-    #[clap(short = 'c')]
-    pub(crate) crs_path: Option<PathBuf>,
-
-    #[clap(short = 'b')]
-    pub(crate) bytecode_path: PathBuf,
-}
-
-pub(crate) fn run(args: GatesCommand) {
-    assert!(args.bytecode_path.is_file(), "Could not find bytecode file at provided path");
-
-    let response: &str = r#"{ "functions": [{"acir_opcodes": 123, "circuit_size": 125 }] }"#;
-
-    std::io::stdout().write_all(response.as_bytes()).unwrap();
-}
diff --git a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/main.rs b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/main.rs
deleted file mode 100644
index 74ea82d28f8..00000000000
--- a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/main.rs
+++ /dev/null
@@ -1,41 +0,0 @@
-#![forbid(unsafe_code)]
-#![warn(unreachable_pub)]
-#![warn(clippy::semicolon_if_nothing_returned)]
-#![cfg_attr(not(test), warn(unused_crate_dependencies, unused_extern_crates))]
-
-use clap::{Parser, Subcommand};
-
-mod contract_cmd;
-mod gates_cmd;
-mod prove_cmd;
-mod verify_cmd;
-mod write_vk_cmd;
-
-#[derive(Parser, Debug)]
-#[command(name = "mock_backend")]
-struct BackendCli {
-    #[command(subcommand)]
-    command: BackendCommand,
-}
-
-#[derive(Subcommand, Clone, Debug)]
-enum BackendCommand {
-    Contract(contract_cmd::ContractCommand),
-    Gates(gates_cmd::GatesCommand),
-    Prove(prove_cmd::ProveCommand),
-    Verify(verify_cmd::VerifyCommand),
-    #[command(name = "write_vk")]
-    WriteVk(write_vk_cmd::WriteVkCommand),
-}
-
-fn main() {
-    let BackendCli { command } = BackendCli::parse();
-
-    match command {
-        BackendCommand::Contract(args) => contract_cmd::run(args),
-        BackendCommand::Gates(args) => gates_cmd::run(args),
-        BackendCommand::Prove(args) => prove_cmd::run(args),
-        BackendCommand::Verify(args) => verify_cmd::run(args),
-        BackendCommand::WriteVk(args) => write_vk_cmd::run(args),
-    };
-}
diff --git a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/prove_cmd.rs b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/prove_cmd.rs
deleted file mode 100644
index 3967778d4e8..00000000000
--- a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/prove_cmd.rs
+++ /dev/null
@@ -1,25 +0,0 @@
-use clap::Args;
-use std::io::Write;
-use std::path::PathBuf;
-
-#[derive(Debug, Clone, Args)]
-pub(crate) struct ProveCommand {
-    #[clap(short = 'c')]
-    pub(crate) crs_path: Option<PathBuf>,
-
-    #[clap(short = 'b')]
-    pub(crate) bytecode_path: PathBuf,
-
-    #[clap(short = 'w')]
-    pub(crate) witness_path: PathBuf,
-
-    #[clap(short = 'o')]
-    pub(crate) proof_path: PathBuf,
-}
-
-pub(crate) fn run(args: ProveCommand) {
-    assert!(args.bytecode_path.is_file(), "Could not find bytecode file at provided path");
-    assert!(args.witness_path.is_file(), "Could not find witness file at provided path");
-
-    std::io::stdout().write_all(b"proof").unwrap();
-}
diff --git a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/verify_cmd.rs b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/verify_cmd.rs
deleted file mode 100644
index 1a715eea880..00000000000
--- a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/verify_cmd.rs
+++ /dev/null
@@ -1,24 +0,0 @@
-use clap::Args;
-use std::path::PathBuf;
-
-#[derive(Debug, Clone, Args)]
-pub(crate) struct VerifyCommand {
-    #[clap(short = 'c')]
-    pub(crate) crs_path: Option<PathBuf>,
-
-    #[clap(short = 'p')]
-    pub(crate) proof_path: PathBuf,
-
-    #[clap(short = 'k')]
-    pub(crate) vk_path: PathBuf,
-
-    #[clap(short = 'r')]
-    pub(crate) is_recursive: bool,
-}
-
-pub(crate) fn run(args: VerifyCommand) {
-    assert!(args.vk_path.is_file(), "Could not find verification key file at provided path");
-    assert!(args.proof_path.is_file(), "Could not find proof file at provided path");
-
-    std::fs::write(args.proof_path, "proof").unwrap();
-}
diff --git a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/write_vk_cmd.rs b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/write_vk_cmd.rs
deleted file mode 100644
index fcee224e85b..00000000000
--- a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/write_vk_cmd.rs
+++ /dev/null
@@ -1,20 +0,0 @@
-use clap::Args;
-use std::path::PathBuf;
-
-#[derive(Debug, Clone, Args)]
-pub(crate) struct WriteVkCommand {
-    #[clap(short = 'c')]
-    pub(crate) crs_path: Option<PathBuf>,
-
-    #[clap(short = 'b')]
-    pub(crate) bytecode_path: PathBuf,
-
-    #[clap(short = 'o')]
-    pub(crate) vk_path: PathBuf,
-}
-
-pub(crate) fn run(args: WriteVkCommand) {
-    assert!(args.bytecode_path.is_file(), "Could not find bytecode file at provided path");
-
-    std::fs::write(args.vk_path, "vk").unwrap();
-}
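The mock backend above is a test double: each subcommand merely asserts that its input files exist and then emits a canned response ("proof", "vk", or a fixed gate report), which is exactly what the `backend_interface` tests assert against. Tests select it through the `NARGO_BACKEND_PATH` override honoured by `Backend::new` in `lib.rs` earlier; a minimal sketch, with an illustrative binary path:

    use backend_interface::Backend;

    fn main() {
        // Route backend resolution to a prebuilt mock binary instead of
        // ~/.nargo/backends/<name>/backend_binary. The path is a placeholder.
        std::env::set_var("NARGO_BACKEND_PATH", "target/debug/mock_backend");

        let backend = Backend::new("mock_backend".to_string());
        // Binary lookup now resolves to the mock, so prove/verify/gates calls
        // return the canned outputs rather than invoking real barretenberg.
        println!("using backend: {}", backend.name());
    }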
diff --git a/noir/noir-repo/tooling/bb_abstraction_leaks/Cargo.toml b/noir/noir-repo/tooling/bb_abstraction_leaks/Cargo.toml
deleted file mode 100644
index 972c78831a7..00000000000
--- a/noir/noir-repo/tooling/bb_abstraction_leaks/Cargo.toml
+++ /dev/null
@@ -1,17 +0,0 @@
-[package]
-name = "bb_abstraction_leaks"
-description = "A crate which encapsulates knowledge about Barretenberg which is currently leaking into Nargo"
-version = "0.11.0"
-authors.workspace = true
-edition.workspace = true
-rust-version.workspace = true
-license.workspace = true
-
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
-[dependencies]
-acvm.workspace = true
-
-[build-dependencies]
-build-target = "0.4.0"
-const_format.workspace = true
diff --git a/noir/noir-repo/tooling/bb_abstraction_leaks/build.rs b/noir/noir-repo/tooling/bb_abstraction_leaks/build.rs
deleted file mode 100644
index 45da7f9d00c..00000000000
--- a/noir/noir-repo/tooling/bb_abstraction_leaks/build.rs
+++ /dev/null
@@ -1,58 +0,0 @@
-use build_target::{Arch, Os};
-use const_format::formatcp;
-
-// Useful for printing debugging messages during the build
-// macro_rules! p {
-//     ($($tokens: tt)*) => {
-//         println!("cargo:warning={}", format!($($tokens)*))
-//     }
-// }
-
-const USERNAME: &str = "AztecProtocol";
-const REPO: &str = "aztec-packages";
-const VERSION: &str = "0.38.0";
-const TAG: &str = formatcp!("aztec-packages-v{}", VERSION);
-
-const API_URL: &str =
-    formatcp!("https://github.com/{}/{}/releases/download/{}", USERNAME, REPO, TAG);
-
-fn main() -> Result<(), String> {
-    // We need to inject which OS we're building for so that we can download the correct barretenberg binary.
-    let os = match build_target::target_os().unwrap() {
-        os @ (Os::Linux | Os::MacOs) => os,
-        Os::Windows => todo!("Windows is not currently supported"),
-        os_name => panic!("Unsupported OS {os_name}"),
-    };
-
-    let arch = match build_target::target_arch().unwrap() {
-        arch @ (Arch::X86_64 | Arch::AARCH64) => arch,
-        arch_name => panic!("Unsupported Architecture {arch_name}"),
-    };
-
-    // Arm builds of linux are not supported
-    // We do not panic because we allow users to run nargo without a backend.
-    if let (Os::Linux, Arch::AARCH64) = (&os, &arch) {
-        println!(
-            "cargo:warning=ARM64 builds of linux are not supported for the barretenberg binary"
-        );
-    };
-
-    println!("cargo:rustc-env=BB_BINARY_URL={}", get_bb_download_url(arch, os));
-    println!("cargo:rustc-env=BB_VERSION={}", VERSION);
-
-    Ok(())
-}
-
-fn get_bb_download_url(target_arch: Arch, target_os: Os) -> String {
-    let archive_name = match target_os {
-        Os::Linux => "barretenberg-x86_64-linux-gnu.tar.gz",
-        Os::MacOs => match target_arch {
-            Arch::AARCH64 => "barretenberg-aarch64-apple-darwin.tar.gz",
-            Arch::X86_64 => "barretenberg-x86_64-apple-darwin.tar.gz",
-            arch => panic!("unsupported arch {arch}"),
-        },
-        os => panic!("Unsupported OS {os}"),
-    };
-
-    format!("{API_URL}/{archive_name}")
-}
diff --git a/noir/noir-repo/tooling/bb_abstraction_leaks/src/lib.rs b/noir/noir-repo/tooling/bb_abstraction_leaks/src/lib.rs
deleted file mode 100644
index 56a4f58cd21..00000000000
--- a/noir/noir-repo/tooling/bb_abstraction_leaks/src/lib.rs
+++ /dev/null
@@ -1,26 +0,0 @@
-#![warn(unused_crate_dependencies, unused_extern_crates)]
-#![warn(unreachable_pub)]
-
-use acvm::{acir::native_types::WitnessMap, FieldElement};
-
-pub const ACVM_BACKEND_BARRETENBERG: &str = "acvm-backend-barretenberg";
-pub const BB_DOWNLOAD_URL: &str = env!("BB_BINARY_URL");
-pub const BB_VERSION: &str = env!("BB_VERSION");
-
-/// Removes the public inputs which are prepended to a proof by Barretenberg.
-pub fn remove_public_inputs(num_pub_inputs: usize, proof: &[u8]) -> Vec<u8> {
-    // Barretenberg prepends the public inputs onto the proof so we need to remove
-    // the first `num_pub_inputs` field elements.
-    let num_bytes_to_remove = num_pub_inputs * (FieldElement::max_num_bytes() as usize);
-    proof[num_bytes_to_remove..].to_vec()
-}
-
-/// Prepends a set of public inputs to a proof.
-pub fn prepend_public_inputs(proof: Vec<u8>, public_inputs: WitnessMap) -> Vec<u8> {
-    // We omit any unassigned witnesses.
-    // Witness values should be ordered by their index but we skip over any indices without an assignment.
-    let public_inputs_bytes =
-        public_inputs.into_iter().flat_map(|(_, assignment)| assignment.to_be_bytes());
-
-    public_inputs_bytes.chain(proof).collect()
-}
diff --git a/noir/noir-repo/tooling/debugger/tests/debug.rs b/noir/noir-repo/tooling/debugger/tests/debug.rs
index b104a2c84ac..313b6b30591 100644
--- a/noir/noir-repo/tooling/debugger/tests/debug.rs
+++ b/noir/noir-repo/tooling/debugger/tests/debug.rs
@@ -16,12 +16,6 @@ mod tests {
         let mut dbg_session =
             spawn_bash(Some(timeout_seconds * 1000)).expect("Could not start bash session");
 
-        // Set backend to `/dev/null` to force an error if nargo tries to speak to a backend.
-        dbg_session
-            .send_line("export NARGO_BACKEND_PATH=/dev/null")
-            .expect("Could not export NARGO_BACKEND_PATH.");
-        dbg_session.wait_for_prompt().expect("Could not export NARGO_BACKEND_PATH.");
-
         // Start debugger and test that it loads for the given program.
         dbg_session
             .execute(
diff --git a/noir/noir-repo/tooling/nargo/src/constants.rs b/noir/noir-repo/tooling/nargo/src/constants.rs
index 0b50d61fe37..1048d86fcd7 100644
--- a/noir/noir-repo/tooling/nargo/src/constants.rs
+++ b/noir/noir-repo/tooling/nargo/src/constants.rs
@@ -13,8 +13,6 @@ pub const EXPORT_DIR: &str = "export";
 // Files
 /// The file from which Nargo pulls prover inputs
 pub const PROVER_INPUT_FILE: &str = "Prover";
-/// The file from which Nargo pulls verifier inputs
-pub const VERIFIER_INPUT_FILE: &str = "Verifier";
 /// The package definition file for a Noir project.
 pub const PKG_FILE: &str = "Nargo.toml";
diff --git a/noir/noir-repo/tooling/nargo/src/package.rs b/noir/noir-repo/tooling/nargo/src/package.rs
index ecbf3585210..44f0a3504f7 100644
--- a/noir/noir-repo/tooling/nargo/src/package.rs
+++ b/noir/noir-repo/tooling/nargo/src/package.rs
@@ -2,7 +2,7 @@ use std::{collections::BTreeMap, fmt::Display, path::PathBuf};
 
 use noirc_frontend::graph::CrateName;
 
-use crate::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE};
+use crate::constants::PROVER_INPUT_FILE;
 
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
 pub enum PackageType {
@@ -59,11 +59,6 @@ impl Package {
         // For now it is hard-coded to be toml.
         self.root_dir.join(format!("{PROVER_INPUT_FILE}.toml"))
     }
-    pub fn verifier_input_path(&self) -> PathBuf {
-        // TODO: This should be configurable, such as if we are looking for .json or .toml or custom paths
-        // For now it is hard-coded to be toml.
- self.root_dir.join(format!("{VERIFIER_INPUT_FILE}.toml")) - } pub fn is_binary(&self) -> bool { self.package_type == PackageType::Binary diff --git a/noir/noir-repo/tooling/nargo_cli/Cargo.toml b/noir/noir-repo/tooling/nargo_cli/Cargo.toml index c20be037e62..d10dd6a22ff 100644 --- a/noir/noir-repo/tooling/nargo_cli/Cargo.toml +++ b/noir/noir-repo/tooling/nargo_cli/Cargo.toml @@ -43,11 +43,10 @@ thiserror.workspace = true tower.workspace = true async-lsp = { workspace = true, features = ["client-monitor", "stdio", "tracing", "tokio"] } const_format.workspace = true -hex.workspace = true similar-asserts.workspace = true termcolor = "1.1.2" color-eyre = "0.6.2" -tokio = { version = "1.0", features = ["io-std"] } +tokio = { version = "1.0", features = ["io-std", "rt"] } dap.workspace = true clap-markdown = { git = "https://github.com/noir-lang/clap-markdown", rev = "450d759532c88f0dba70891ceecdbc9ff8f25d2b", optional = true } @@ -55,9 +54,6 @@ notify = "6.1.1" notify-debouncer-full = "0.3.1" termion = "3.0.0" -# Backends -backend-interface = { path = "../backend_interface" } - # Logs tracing-subscriber.workspace = true tracing-appender = "0.2.3" diff --git a/noir/noir-repo/tooling/nargo_cli/build.rs b/noir/noir-repo/tooling/nargo_cli/build.rs index 0ed2d4c07f7..74042cf4e40 100644 --- a/noir/noir-repo/tooling/nargo_cli/build.rs +++ b/noir/noir-repo/tooling/nargo_cli/build.rs @@ -63,7 +63,6 @@ fn execution_success_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("execute").arg("--force"); @@ -101,7 +100,6 @@ fn execution_failure_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("execute").arg("--force"); @@ -139,7 +137,6 @@ fn noir_test_success_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("test"); @@ -177,7 +174,6 @@ fn noir_test_failure_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("test"); @@ -218,7 +214,6 @@ fn compile_success_empty_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("info"); cmd.arg("--json"); @@ -269,7 +264,6 @@ fn compile_success_contract_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("compile").arg("--force"); @@ -307,7 +301,6 @@ fn compile_failure_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("compile").arg("--force"); diff --git a/noir/noir-repo/tooling/nargo_cli/src/backends.rs 
b/noir/noir-repo/tooling/nargo_cli/src/backends.rs deleted file mode 100644 index 2b3e9d8861f..00000000000 --- a/noir/noir-repo/tooling/nargo_cli/src/backends.rs +++ /dev/null @@ -1,39 +0,0 @@ -use std::path::PathBuf; - -use backend_interface::backends_directory; -pub(crate) use backend_interface::Backend; - -fn active_backend_file_path() -> PathBuf { - backends_directory().join(".selected_backend") -} - -pub(crate) use backend_interface::ACVM_BACKEND_BARRETENBERG; - -pub(crate) fn clear_active_backend() { - let active_backend_file = active_backend_file_path(); - if active_backend_file.is_file() { - std::fs::remove_file(active_backend_file_path()) - .expect("should delete active backend file"); - } -} - -pub(crate) fn set_active_backend(backend_name: &str) { - let active_backend_file = active_backend_file_path(); - let backends_directory = - active_backend_file.parent().expect("active backend file should have parent"); - - std::fs::create_dir_all(backends_directory).expect("Could not create backends directory"); - std::fs::write(active_backend_file, backend_name.as_bytes()) - .expect("Could not write to active backend file"); -} - -pub(crate) fn get_active_backend() -> String { - let active_backend_file = active_backend_file_path(); - - if !active_backend_file.is_file() { - set_active_backend(ACVM_BACKEND_BARRETENBERG); - return ACVM_BACKEND_BARRETENBERG.to_string(); - } - - std::fs::read_to_string(active_backend_file).expect("Could not read active backend file") -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/current_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/current_cmd.rs deleted file mode 100644 index 5aba00764d3..00000000000 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/current_cmd.rs +++ /dev/null @@ -1,13 +0,0 @@ -use clap::Args; - -use crate::{backends::get_active_backend, errors::CliError}; - -/// Prints the name of the currently active backend -#[derive(Debug, Clone, Args)] -pub(crate) struct CurrentCommand; - -pub(crate) fn run(_args: CurrentCommand) -> Result<(), CliError> { - println!("{}", get_active_backend()); - - Ok(()) -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/install_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/install_cmd.rs deleted file mode 100644 index 974db9ff7f5..00000000000 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/install_cmd.rs +++ /dev/null @@ -1,30 +0,0 @@ -use clap::Args; - -use backend_interface::{backends_directory, download_backend}; - -use crate::errors::{BackendError, CliError}; - -use super::ls_cmd::get_available_backends; - -/// Install a new backend from a URL. -#[derive(Debug, Clone, Args)] -pub(crate) struct InstallCommand { - /// The name of the backend to install. - backend: String, - - /// The URL from which to download the backend. 
-    url: String,
-}
-
-pub(crate) fn run(args: InstallCommand) -> Result<(), CliError> {
-    let installed_backends = get_available_backends();
-
-    if installed_backends.contains(&args.backend) {
-        return Err(BackendError::AlreadyInstalled(args.backend).into());
-    }
-
-    download_backend(&args.url, &backends_directory().join(args.backend).join("backend_binary"))
-        .map_err(BackendError::from)?;
-
-    Ok(())
-}
diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs
deleted file mode 100644
index da37b104d65..00000000000
--- a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs
+++ /dev/null
@@ -1,34 +0,0 @@
-use backend_interface::backends_directory;
-use clap::Args;
-
-use crate::errors::CliError;
-
-/// Prints the list of currently installed backends
-#[derive(Debug, Clone, Args)]
-pub(crate) struct LsCommand;
-
-pub(crate) fn run(_args: LsCommand) -> Result<(), CliError> {
-    for backend in get_available_backends() {
-        println!("{backend}");
-    }
-
-    Ok(())
-}
-
-pub(super) fn get_available_backends() -> Vec<String> {
-    let backend_directory_contents = std::fs::read_dir(backends_directory())
-        .expect("Could not read backends directory contents");
-
-    // TODO: Highlight the currently active backend.
-    backend_directory_contents
-        .into_iter()
-        .filter_map(|entry| {
-            let path = entry.ok()?.path();
-            if path.is_dir() {
-                path.file_name().map(|name| name.to_string_lossy().to_string())
-            } else {
-                None
-            }
-        })
-        .collect()
-}
diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/mod.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/mod.rs
deleted file mode 100644
index 985dbbdb934..00000000000
--- a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/mod.rs
+++ /dev/null
@@ -1,41 +0,0 @@
-use clap::{Args, Subcommand};
-
-use crate::errors::CliError;
-
-mod current_cmd;
-mod install_cmd;
-mod ls_cmd;
-mod uninstall_cmd;
-mod use_cmd;
-
-#[non_exhaustive]
-#[derive(Args, Clone, Debug)]
-/// Install and select custom backends used to generate and verify proofs.
-pub(crate) struct BackendCommand { - #[command(subcommand)] - command: BackendCommands, -} - -#[non_exhaustive] -#[derive(Subcommand, Clone, Debug)] -pub(crate) enum BackendCommands { - Current(current_cmd::CurrentCommand), - Ls(ls_cmd::LsCommand), - Use(use_cmd::UseCommand), - Install(install_cmd::InstallCommand), - Uninstall(uninstall_cmd::UninstallCommand), -} - -pub(crate) fn run(cmd: BackendCommand) -> Result<(), CliError> { - let BackendCommand { command } = cmd; - - match command { - BackendCommands::Current(args) => current_cmd::run(args), - BackendCommands::Ls(args) => ls_cmd::run(args), - BackendCommands::Use(args) => use_cmd::run(args), - BackendCommands::Install(args) => install_cmd::run(args), - BackendCommands::Uninstall(args) => uninstall_cmd::run(args), - }?; - - Ok(()) -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/uninstall_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/uninstall_cmd.rs deleted file mode 100644 index 7497f1bc2f6..00000000000 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/uninstall_cmd.rs +++ /dev/null @@ -1,59 +0,0 @@ -use clap::Args; - -use backend_interface::backends_directory; - -use crate::{ - backends::{ - clear_active_backend, get_active_backend, set_active_backend, ACVM_BACKEND_BARRETENBERG, - }, - errors::{BackendError, CliError}, -}; - -use super::ls_cmd::get_available_backends; - -/// Uninstalls a backend -#[derive(Debug, Clone, Args)] -pub(crate) struct UninstallCommand { - /// The name of the backend to uninstall. - backend: String, -} - -pub(crate) fn run(args: UninstallCommand) -> Result<(), CliError> { - let installed_backends = get_available_backends(); - - if !installed_backends.contains(&args.backend) { - return Err(BackendError::UnknownBackend(args.backend).into()); - } - - let active_backend = get_active_backend(); - - // Handle the case where we're uninstalling the currently active backend. - if active_backend == args.backend { - let barretenberg_is_installed = - installed_backends.iter().any(|backend_name| backend_name == ACVM_BACKEND_BARRETENBERG); - - let new_active_backend = - if args.backend != ACVM_BACKEND_BARRETENBERG && barretenberg_is_installed { - // Prefer switching to barretenberg if possible. - Some(ACVM_BACKEND_BARRETENBERG) - } else { - // Otherwise pick the first backend which isn't being uninstalled. - installed_backends - .iter() - .find(|&backend_name| backend_name != &args.backend) - .map(|name| name.as_str()) - }; - - if let Some(backend) = new_active_backend { - set_active_backend(backend); - } else { - // We've deleted the last backend. Clear the active backend file to be recreated once we install a new one. 
- clear_active_backend(); - } - } - - std::fs::remove_dir_all(backends_directory().join(args.backend)) - .expect("backend directory should be deleted"); - - Ok(()) -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/use_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/use_cmd.rs deleted file mode 100644 index 66a129c2148..00000000000 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/use_cmd.rs +++ /dev/null @@ -1,26 +0,0 @@ -use clap::Args; - -use crate::{ - backends::set_active_backend, - errors::{BackendError, CliError}, -}; - -use super::ls_cmd::get_available_backends; - -/// Select the backend to use -#[derive(Debug, Clone, Args)] -pub(crate) struct UseCommand { - backend: String, -} - -pub(crate) fn run(args: UseCommand) -> Result<(), CliError> { - let backends = get_available_backends(); - - if !backends.contains(&args.backend) { - return Err(BackendError::UnknownBackend(args.backend).into()); - } - - set_active_backend(&args.backend); - - Ok(()) -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs index d5313d96076..e2e1f147b90 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs @@ -95,13 +95,11 @@ fn check_package( Ok(false) } else { // XXX: We can have a --overwrite flag to determine if you want to overwrite the Prover/Verifier.toml files - if let Some((parameters, return_type)) = compute_function_abi(&context, &crate_id) { + if let Some((parameters, _)) = compute_function_abi(&context, &crate_id) { let path_to_prover_input = package.prover_input_path(); - let path_to_verifier_input = package.verifier_input_path(); // Before writing the file, check if it exists and whether overwrite is set let should_write_prover = !path_to_prover_input.exists() || allow_overwrite; - let should_write_verifier = !path_to_verifier_input.exists() || allow_overwrite; if should_write_prover { let prover_toml = create_input_toml_template(parameters.clone(), None); @@ -110,19 +108,7 @@ fn check_package( eprintln!("Note: Prover.toml already exists. Use --overwrite to force overwrite."); } - if should_write_verifier { - let public_inputs = - parameters.into_iter().filter(|param| param.is_public()).collect(); - - let verifier_toml = create_input_toml_template(public_inputs, return_type); - write_to_file(verifier_toml.as_bytes(), &path_to_verifier_input); - } else { - eprintln!( - "Note: Verifier.toml already exists. Use --overwrite to force overwrite." 
-            );
-        }
-
-        let any_file_written = should_write_prover || should_write_verifier;
+        let any_file_written = should_write_prover;
 
         Ok(any_file_written)
     } else {
diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs
deleted file mode 100644
index 6247560f621..00000000000
--- a/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs
+++ /dev/null
@@ -1,68 +0,0 @@
-use super::compile_cmd::compile_workspace_full;
-use super::fs::{create_named_dir, write_to_file};
-use super::NargoConfig;
-use crate::backends::Backend;
-use crate::cli::fs::program::read_program_from_file;
-use crate::errors::CliError;
-
-use clap::Args;
-use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection};
-use noirc_driver::{CompileOptions, NOIR_ARTIFACT_VERSION_STRING};
-use noirc_frontend::graph::CrateName;
-
-/// Generates a Solidity verifier smart contract for the program
-#[derive(Debug, Clone, Args)]
-pub(crate) struct CodegenVerifierCommand {
-    /// The name of the package to codegen
-    #[clap(long, conflicts_with = "workspace")]
-    package: Option<CrateName>,
-
-    /// Codegen all packages in the workspace
-    #[clap(long, conflicts_with = "package")]
-    workspace: bool,
-
-    #[clap(flatten)]
-    compile_options: CompileOptions,
-}
-
-pub(crate) fn run(
-    backend: &Backend,
-    args: CodegenVerifierCommand,
-    config: NargoConfig,
-) -> Result<(), CliError> {
-    let toml_path = get_package_manifest(&config.program_dir)?;
-    let default_selection =
-        if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll };
-    let selection = args.package.map_or(default_selection, PackageSelection::Selected);
-    let workspace = resolve_workspace_from_toml(
-        &toml_path,
-        selection,
-        Some(NOIR_ARTIFACT_VERSION_STRING.to_string()),
-    )?;
-
-    // Compile the full workspace in order to generate any build artifacts.
-    compile_workspace_full(&workspace, &args.compile_options)?;
-
-    let binary_packages = workspace.into_iter().filter(|package| package.is_binary());
-    for package in binary_packages {
-        let program_artifact_path = workspace.package_build_path(package);
-        let program = read_program_from_file(&program_artifact_path)?;
-
-        // TODO(https://github.com/noir-lang/noir/issues/4428):
-        // We do not expect to have a smart contract verifier for a foldable program with multiple circuits.
-        // However, in the future we can expect to possibly have non-inlined ACIR functions during compilation
-        // that will be inlined at a later step such as by the ACVM compiler or by the backend.
-        // Add appropriate handling here once the compiler enables multiple ACIR functions.
-        assert_eq!(program.bytecode.functions.len(), 1);
-        let smart_contract_string = backend.eth_contract(program_artifact_path)?;
-
-        let contract_dir = workspace.contracts_directory_path(package);
-        create_named_dir(&contract_dir, "contract");
-        let contract_path = contract_dir.join("plonk_vk").with_extension("sol");
-
-        let path = write_to_file(smart_contract_string.as_bytes(), &contract_path);
-        println!("[{}] Contract successfully created and located at {path}", package.name);
-    }
-
-    Ok(())
-}
diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/inputs.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/fs/inputs.rs
index bd038c51ad5..dee9a00507c 100644
--- a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/inputs.rs
+++ b/noir/noir-repo/tooling/nargo_cli/src/cli/fs/inputs.rs
@@ -6,8 +6,6 @@ use std::{collections::BTreeMap, path::Path};
 
 use crate::errors::FilesystemError;
 
-use super::write_to_file;
-
 /// Returns the circuit's parameters and its return value, if one exists.
 /// # Examples
 ///
@@ -36,99 +34,3 @@ pub(crate) fn read_inputs_from_file<P: AsRef<Path>>(
 
     Ok((input_map, return_value))
 }
-
-pub(crate) fn write_inputs_to_file<P: AsRef<Path>>(
-    input_map: &InputMap,
-    return_value: &Option<InputValue>,
-    abi: &Abi,
-    path: P,
-    file_name: &str,
-    format: Format,
-) -> Result<(), FilesystemError> {
-    let file_path = path.as_ref().join(file_name).with_extension(format.ext());
-
-    // We must insert the return value into the `InputMap` in order for it to be written to file.
-    let serialized_output = match return_value {
-        // Parameters and return values are kept separate except for when they're being written to file.
-        // As a result, we don't want to modify the original map and must clone it before insertion.
-        Some(return_value) => {
-            let mut input_map = input_map.clone();
-            input_map.insert(MAIN_RETURN_NAME.to_owned(), return_value.clone());
-            format.serialize(&input_map, abi)?
-        }
-        // If no return value exists, then we can serialize the original map directly.
-        None => format.serialize(input_map, abi)?,
-    };
-
-    write_to_file(serialized_output.as_bytes(), &file_path);
-
-    Ok(())
-}
-
-#[cfg(test)]
-mod tests {
-    use std::{collections::BTreeMap, vec};
-
-    use acvm::FieldElement;
-    use nargo::constants::VERIFIER_INPUT_FILE;
-    use noirc_abi::{
-        input_parser::{Format, InputValue},
-        Abi, AbiParameter, AbiReturnType, AbiType, AbiVisibility,
-    };
-    use tempfile::TempDir;
-
-    use super::{read_inputs_from_file, write_inputs_to_file};
-
-    #[test]
-    fn write_and_read_recovers_inputs_and_return_value() {
-        let input_dir = TempDir::new().unwrap().into_path();
-
-        // We purposefully test a simple ABI here as we're focussing on `fs`.
-        // Tests for serializing complex types should exist in `noirc_abi`.
-        let abi = Abi {
-            parameters: vec![
-                AbiParameter {
-                    name: "foo".into(),
-                    typ: AbiType::Field,
-                    visibility: AbiVisibility::Public,
-                },
-                AbiParameter {
-                    name: "bar".into(),
-                    typ: AbiType::String { length: 11 },
-                    visibility: AbiVisibility::Private,
-                },
-            ],
-            return_type: Some(AbiReturnType {
-                abi_type: AbiType::Field,
-                visibility: AbiVisibility::Public,
-            }),
-
-            // Input serialization is only dependent on types, not position in witness map.
-            // Neither of these should be relevant so we leave them empty.
-            param_witnesses: BTreeMap::new(),
-            return_witnesses: Vec::new(),
-            error_types: BTreeMap::new(),
-        };
-        let input_map = BTreeMap::from([
-            ("foo".to_owned(), InputValue::Field(42u128.into())),
-            ("bar".to_owned(), InputValue::String("hello world".to_owned())),
-        ]);
-        let return_value = Some(InputValue::Field(FieldElement::zero()));
-
-        write_inputs_to_file(
-            &input_map,
-            &return_value,
-            &abi,
-            &input_dir,
-            VERIFIER_INPUT_FILE,
-            Format::Toml,
-        )
-        .unwrap();
-
-        let (loaded_inputs, loaded_return_value) =
-            read_inputs_from_file(input_dir, VERIFIER_INPUT_FILE, Format::Toml, &abi).unwrap();
-
-        assert_eq!(loaded_inputs, input_map);
-        assert_eq!(loaded_return_value, return_value);
-    }
-}
diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/mod.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/fs/mod.rs
index 4ebce3b3325..8658bd5b248 100644
--- a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/mod.rs
+++ b/noir/noir-repo/tooling/nargo_cli/src/cli/fs/mod.rs
@@ -4,11 +4,8 @@ use std::{
     path::{Path, PathBuf},
 };
 
-use crate::errors::FilesystemError;
-
 pub(super) mod inputs;
 pub(super) mod program;
-pub(super) mod proof;
 pub(super) mod witness;
 
 pub(super) fn create_named_dir(named_dir: &Path, name: &str) -> PathBuf {
@@ -31,12 +28,3 @@ pub(super) fn write_to_file(bytes: &[u8], path: &Path) -> String {
         Ok(_) => display.to_string(),
     }
 }
-
-pub(super) fn load_hex_data<P: AsRef<Path>>(path: P) -> Result<Vec<u8>, FilesystemError> {
-    let hex_data: Vec<_> = std::fs::read(&path)
-        .map_err(|_| FilesystemError::PathNotValid(path.as_ref().to_path_buf()))?;
-
-    let raw_bytes = hex::decode(hex_data).map_err(FilesystemError::HexArtifactNotValid)?;
-
-    Ok(raw_bytes)
-}
diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/proof.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/fs/proof.rs
deleted file mode 100644
index d2b3050708b..00000000000
--- a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/proof.rs
+++ /dev/null
@@ -1,20 +0,0 @@
-use std::path::{Path, PathBuf};
-
-use nargo::constants::PROOF_EXT;
-
-use crate::errors::FilesystemError;
-
-use super::{create_named_dir, write_to_file};
-
-pub(crate) fn save_proof_to_dir<P: AsRef<Path>>(
-    proof: &[u8],
-    proof_name: &str,
-    proof_dir: P,
-) -> Result<PathBuf, FilesystemError> {
-    create_named_dir(proof_dir.as_ref(), "proof");
-    let proof_path = proof_dir.as_ref().join(proof_name).with_extension(PROOF_EXT);
-
-    write_to_file(hex::encode(proof).as_bytes(), &proof_path);
-
-    Ok(proof_path)
-}
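The removed proof helpers stored proofs hex-encoded on disk: `save_proof_to_dir` writes `hex::encode(proof)` and `load_hex_data` reverses it with `hex::decode`. A minimal round-trip sketch using the `hex` crate (the file name is illustrative):

    fn main() {
        // Raw proof bytes -> hex text file -> raw bytes again.
        let proof: Vec<u8> = vec![0xde, 0xad, 0xbe, 0xef];
        std::fs::write("my_program.proof", hex::encode(&proof)).unwrap();

        let hex_data = std::fs::read("my_program.proof").unwrap();
        let raw_bytes = hex::decode(hex_data).unwrap();
        assert_eq!(raw_bytes, proof);
    }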
@@ -93,14 +87,12 @@ pub(crate) fn run(
         .par_bridge()
         .map(|(package, program)| {
             count_opcodes_and_gates_in_program(
-                backend,
-                workspace.package_build_path(&package),
                 program,
                 &package,
                 args.compile_options.expression_width,
             )
         })
-        .collect::<Result<Vec<ProgramInfo>, CliError>>()?;
+        .collect();
 
     let info_report = InfoReport { programs: program_info, contracts: Vec::new() };
 
@@ -196,7 +188,6 @@ impl From<ProgramInfo> for Vec<Row> {
                 Fc->format!("{}", function.name),
                 format!("{:?}", program_info.expression_width),
                 Fc->format!("{}", function.acir_opcodes),
-                Fc->format!("{}", function.circuit_size),
             ]
         })
     }
@@ -215,7 +206,6 @@ struct ContractInfo {
 struct FunctionInfo {
     name: String,
     acir_opcodes: usize,
-    circuit_size: u32,
 }
 
 impl From<ContractInfo> for Vec<Row> {
@@ -226,34 +216,26 @@ impl From<ContractInfo> for Vec<Row> {
                 Fc->format!("{}", function.name),
                 format!("{:?}", contract_info.expression_width),
                 Fc->format!("{}", function.acir_opcodes),
-                Fc->format!("{}", function.circuit_size),
             ]
         })
     }
 }
 
 fn count_opcodes_and_gates_in_program(
-    backend: &Backend,
-    program_artifact_path: PathBuf,
     compiled_program: ProgramArtifact,
     package: &Package,
     expression_width: ExpressionWidth,
-) -> Result<ProgramInfo, CliError> {
-    let program_circuit_sizes = backend.get_exact_circuit_sizes(program_artifact_path)?;
+) -> ProgramInfo {
     let functions = compiled_program
         .bytecode
         .functions
         .into_par_iter()
        .enumerate()
-        .map(|(i, function)| -> Result<_, BackendError> {
-            Ok(FunctionInfo {
-                name: compiled_program.names[i].clone(),
-                // Required while mock backend doesn't return correct circuit size.
-                acir_opcodes: function.opcodes.len(),
-                circuit_size: program_circuit_sizes[i].circuit_size,
-            })
+        .map(|(i, function)| FunctionInfo {
+            name: compiled_program.names[i].clone(),
+            acir_opcodes: function.opcodes.len(),
         })
-        .collect::<Result<Vec<FunctionInfo>, BackendError>>()?;
+        .collect();
 
-    Ok(ProgramInfo { package_name: package.name.to_string(), expression_width, functions })
+    ProgramInfo { package_name: package.name.to_string(), expression_width, functions }
 }
diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs
index ad778549ac0..485ccc7abaf 100644
--- a/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs
+++ b/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs
@@ -6,13 +6,9 @@ use std::path::PathBuf;
 
 use color_eyre::eyre;
 
-use crate::backends::get_active_backend;
-
 mod fs;
 
-mod backend_cmd;
 mod check_cmd;
-mod codegen_verifier_cmd;
 mod compile_cmd;
 mod dap_cmd;
 mod debug_cmd;
@@ -23,9 +19,7 @@ mod info_cmd;
 mod init_cmd;
 mod lsp_cmd;
 mod new_cmd;
-mod prove_cmd;
 mod test_cmd;
-mod verify_cmd;
 
 const GIT_HASH: &str = env!("GIT_COMMIT");
 const IS_DIRTY: &str = env!("GIT_DIRTY");
@@ -60,10 +54,8 @@ pub(crate) struct NargoConfig {
 #[non_exhaustive]
 #[derive(Subcommand, Clone, Debug)]
 enum NargoCommand {
-    Backend(backend_cmd::BackendCommand),
    Check(check_cmd::CheckCommand),
    Fmt(fmt_cmd::FormatCommand),
-    CodegenVerifier(codegen_verifier_cmd::CodegenVerifierCommand),
    #[command(alias = "build")]
    Compile(compile_cmd::CompileCommand),
    New(new_cmd::NewCommand),
@@ -73,8 +65,6 @@ enum NargoCommand {
    Export(export_cmd::ExportCommand),
    #[command(hide = true)] // Hidden while the feature is being built out
    Debug(debug_cmd::DebugCommand),
-    Prove(prove_cmd::ProveCommand),
-    Verify(verify_cmd::VerifyCommand),
    Test(test_cmd::TestCommand),
    Info(info_cmd::InfoCommand),
    Lsp(lsp_cmd::LspCommand),
@@ -94,18 +84,11 @@ pub(crate) fn start_cli() -> eyre::Result<()> {
     // Search through parent directories to find package root if necessary.
if !matches!( command, - NargoCommand::New(_) - | NargoCommand::Init(_) - | NargoCommand::Lsp(_) - | NargoCommand::Backend(_) - | NargoCommand::Dap(_) + NargoCommand::New(_) | NargoCommand::Init(_) | NargoCommand::Lsp(_) | NargoCommand::Dap(_) ) { config.program_dir = find_package_root(&config.program_dir)?; } - let active_backend = get_active_backend(); - let backend = crate::backends::Backend::new(active_backend); - match command { NargoCommand::New(args) => new_cmd::run(args, config), NargoCommand::Init(args) => init_cmd::run(args, config), @@ -114,12 +97,8 @@ pub(crate) fn start_cli() -> eyre::Result<()> { NargoCommand::Debug(args) => debug_cmd::run(args, config), NargoCommand::Execute(args) => execute_cmd::run(args, config), NargoCommand::Export(args) => export_cmd::run(args, config), - NargoCommand::Prove(args) => prove_cmd::run(&backend, args, config), - NargoCommand::Verify(args) => verify_cmd::run(&backend, args, config), NargoCommand::Test(args) => test_cmd::run(args, config), - NargoCommand::Info(args) => info_cmd::run(&backend, args, config), - NargoCommand::CodegenVerifier(args) => codegen_verifier_cmd::run(&backend, args, config), - NargoCommand::Backend(args) => backend_cmd::run(args), + NargoCommand::Info(args) => info_cmd::run(args, config), NargoCommand::Lsp(args) => lsp_cmd::run(args, config), NargoCommand::Dap(args) => dap_cmd::run(args, config), NargoCommand::Fmt(args) => fmt_cmd::run(args, config), diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs deleted file mode 100644 index 127c5ac2ebb..00000000000 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs +++ /dev/null @@ -1,141 +0,0 @@ -use std::path::PathBuf; - -use clap::Args; -use nargo::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; -use nargo::package::Package; -use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_abi::input_parser::Format; -use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; -use noirc_frontend::graph::CrateName; - -use super::compile_cmd::compile_workspace_full; -use super::fs::program::read_program_from_file; -use super::fs::{ - inputs::{read_inputs_from_file, write_inputs_to_file}, - proof::save_proof_to_dir, -}; -use super::NargoConfig; -use crate::{backends::Backend, cli::execute_cmd::execute_program, errors::CliError}; - -/// Create proof for this program. The proof is returned as a hex encoded string. 
-#[derive(Debug, Clone, Args)]
-#[clap(visible_alias = "p")]
-pub(crate) struct ProveCommand {
-    /// The name of the toml file which contains the inputs for the prover
-    #[clap(long, short, default_value = PROVER_INPUT_FILE)]
-    prover_name: String,
-
-    /// The name of the toml file which contains the inputs for the verifier
-    #[clap(long, short, default_value = VERIFIER_INPUT_FILE)]
-    verifier_name: String,
-
-    /// Verify proof after proving
-    #[arg(long)]
-    verify: bool,
-
-    /// The name of the package to prove
-    #[clap(long, conflicts_with = "workspace")]
-    package: Option<CrateName>,
-
-    /// Prove all packages in the workspace
-    #[clap(long, conflicts_with = "package")]
-    workspace: bool,
-
-    #[clap(flatten)]
-    compile_options: CompileOptions,
-
-    /// JSON RPC url to solve oracle calls
-    #[clap(long)]
-    oracle_resolver: Option<String>,
-}
-
-pub(crate) fn run(
-    backend: &Backend,
-    args: ProveCommand,
-    config: NargoConfig,
-) -> Result<(), CliError> {
-    let toml_path = get_package_manifest(&config.program_dir)?;
-    let default_selection =
-        if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll };
-    let selection = args.package.map_or(default_selection, PackageSelection::Selected);
-    let workspace = resolve_workspace_from_toml(
-        &toml_path,
-        selection,
-        Some(NOIR_ARTIFACT_VERSION_STRING.to_string()),
-    )?;
-
-    // Compile the full workspace in order to generate any build artifacts.
-    compile_workspace_full(&workspace, &args.compile_options)?;
-
-    let binary_packages = workspace.into_iter().filter(|package| package.is_binary());
-    for package in binary_packages {
-        let program_artifact_path = workspace.package_build_path(package);
-        let program: CompiledProgram = read_program_from_file(&program_artifact_path)?.into();
-
-        let proof = prove_package(
-            backend,
-            package,
-            program,
-            program_artifact_path,
-            &args.prover_name,
-            &args.verifier_name,
-            args.verify,
-            args.oracle_resolver.as_deref(),
-        )?;
-
-        save_proof_to_dir(&proof, &String::from(&package.name), workspace.proofs_directory_path())?;
-    }
-
-    Ok(())
-}
-
-#[allow(clippy::too_many_arguments)]
-fn prove_package(
-    backend: &Backend,
-    package: &Package,
-    compiled_program: CompiledProgram,
-    program_artifact_path: PathBuf,
-    prover_name: &str,
-    verifier_name: &str,
-    check_proof: bool,
-    foreign_call_resolver_url: Option<&str>,
-) -> Result<Vec<u8>, CliError> {
-    // Parse the initial witness values from Prover.toml
-    let (inputs_map, _) =
-        read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &compiled_program.abi)?;
-
-    let witness_stack = execute_program(&compiled_program, &inputs_map, foreign_call_resolver_url)?;
-
-    // Write public inputs into Verifier.toml
-    let public_abi = compiled_program.abi.public_abi();
-    // Get the entry point witness for the ABI
-    let main_witness =
-        &witness_stack.peek().expect("Should have at least one witness on the stack").witness;
-    let (public_inputs, return_value) = public_abi.decode(main_witness)?;
-
-    write_inputs_to_file(
-        &public_inputs,
-        &return_value,
-        &public_abi,
-        &package.root_dir,
-        verifier_name,
-        Format::Toml,
-    )?;
-
-    let proof = backend.prove(
-        program_artifact_path.clone(),
-        witness_stack,
-        compiled_program.program.functions[0].public_inputs().0.len() as u32,
-    )?;
-
-    if check_proof {
-        let public_inputs = public_abi.encode(&public_inputs, return_value)?;
-        let valid_proof = backend.verify(&proof, public_inputs, program_artifact_path)?;
-
-        if !valid_proof {
-            return Err(CliError::InvalidProof("".into()));
-        }
-    }
-
-    Ok(proof)
-}
diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs
deleted file mode 100644
index ad1978cabe0..00000000000
--- a/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs
+++ /dev/null
@@ -1,92 +0,0 @@
-use super::compile_cmd::compile_workspace_full;
-use super::fs::program::read_program_from_file;
-use super::fs::{inputs::read_inputs_from_file, load_hex_data};
-use super::NargoConfig;
-use crate::{backends::Backend, errors::CliError};
-
-use clap::Args;
-use nargo::constants::{PROOF_EXT, VERIFIER_INPUT_FILE};
-use nargo::package::Package;
-use nargo::workspace::Workspace;
-use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection};
-use noirc_abi::input_parser::Format;
-use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING};
-use noirc_frontend::graph::CrateName;
-
-/// Given a proof and a program, verify whether the proof is valid
-#[derive(Debug, Clone, Args)]
-#[clap(visible_alias = "v")]
-pub(crate) struct VerifyCommand {
-    /// The name of the toml file which contains the inputs for the verifier
-    #[clap(long, short, default_value = VERIFIER_INPUT_FILE)]
-    verifier_name: String,
-
-    /// The name of the package verify
-    #[clap(long, conflicts_with = "workspace")]
-    package: Option<CrateName>,
-
-    /// Verify all packages in the workspace
-    #[clap(long, conflicts_with = "package")]
-    workspace: bool,
-
-    #[clap(flatten)]
-    compile_options: CompileOptions,
-}
-
-pub(crate) fn run(
-    backend: &Backend,
-    args: VerifyCommand,
-    config: NargoConfig,
-) -> Result<(), CliError> {
-    let toml_path = get_package_manifest(&config.program_dir)?;
-    let default_selection =
-        if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll };
-    let selection = args.package.map_or(default_selection, PackageSelection::Selected);
-    let workspace = resolve_workspace_from_toml(
-        &toml_path,
-        selection,
-        Some(NOIR_ARTIFACT_VERSION_STRING.to_string()),
-    )?;
-
-    // Compile the full workspace in order to generate any build artifacts.
-    compile_workspace_full(&workspace, &args.compile_options)?;
-
-    let binary_packages = workspace.into_iter().filter(|package| package.is_binary());
-    for package in binary_packages {
-        let program_artifact_path = workspace.package_build_path(package);
-        let program: CompiledProgram = read_program_from_file(program_artifact_path)?.into();
-
-        verify_package(backend, &workspace, package, program, &args.verifier_name)?;
-    }
-
-    Ok(())
-}
-
-fn verify_package(
-    backend: &Backend,
-    workspace: &Workspace,
-    package: &Package,
-    compiled_program: CompiledProgram,
-    verifier_name: &str,
-) -> Result<(), CliError> {
-    // Load public inputs (if any) from `verifier_name`.
- let public_abi = compiled_program.abi.public_abi(); - let (public_inputs_map, return_value) = - read_inputs_from_file(&package.root_dir, verifier_name, Format::Toml, &public_abi)?; - - let public_inputs = public_abi.encode(&public_inputs_map, return_value)?; - - let proof_path = - workspace.proofs_directory_path().join(package.name.to_string()).with_extension(PROOF_EXT); - - let proof = load_hex_data(&proof_path)?; - - let valid_proof = - backend.verify(&proof, public_inputs, workspace.package_build_path(package))?; - - if valid_proof { - Ok(()) - } else { - Err(CliError::InvalidProof(proof_path)) - } -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/errors.rs b/noir/noir-repo/tooling/nargo_cli/src/errors.rs index 40fb7886405..3e0b13a9cbc 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/errors.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/errors.rs @@ -1,5 +1,4 @@ use acvm::acir::native_types::WitnessStackError; -use hex::FromHexError; use nargo::{errors::CompileError, NargoError}; use nargo_toml::ManifestError; use noir_debugger::errors::DapError; @@ -11,8 +10,7 @@ use thiserror::Error; pub(crate) enum FilesystemError { #[error("Error: {} is not a valid path\nRun either `nargo compile` to generate missing build artifacts or `nargo prove` to construct a proof", .0.display())] PathNotValid(PathBuf), - #[error("Error: could not parse hex build artifact (proof, proving and/or verification keys, ACIR checksum) ({0})")] - HexArtifactNotValid(FromHexError), + #[error( " Error: cannot find {0}.toml file.\n Expected location: {1:?} \n Please generate this file at the expected location." )] @@ -37,9 +35,6 @@ pub(crate) enum CliError { #[error("Error: destination {} already exists", .0.display())] DestinationAlreadyExists(PathBuf), - #[error("Failed to verify proof {}", .0.display())] - InvalidProof(PathBuf), - #[error("Invalid package name {0}. Did you mean to use `--name`?")] InvalidPackageName(String), @@ -68,24 +63,4 @@ pub(crate) enum CliError { /// Error from the compilation pipeline #[error(transparent)] CompileError(#[from] CompileError), - - /// Error related to backend selection/installation. - #[error(transparent)] - BackendError(#[from] BackendError), - - /// Error related to communication with backend. - #[error(transparent)] - BackendCommunicationError(#[from] backend_interface::BackendError), -} - -#[derive(Debug, thiserror::Error)] -pub(crate) enum BackendError { - #[error("No backend is installed with the name {0}")] - UnknownBackend(String), - - #[error("The backend {0} is already installed")] - AlreadyInstalled(String), - - #[error("Backend installation failed: {0}")] - InstallationError(#[from] std::io::Error), } diff --git a/noir/noir-repo/tooling/nargo_cli/src/main.rs b/noir/noir-repo/tooling/nargo_cli/src/main.rs index 6e2b7069bc4..a407d467ced 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/main.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/main.rs @@ -7,7 +7,6 @@ //! This name was used because it sounds like `cargo` and //! Noir Package Manager abbreviated is npm, which is already taken. -mod backends; mod cli; mod errors; diff --git a/noir/noir-repo/tooling/nargo_cli/tests/codegen-verifier.rs b/noir/noir-repo/tooling/nargo_cli/tests/codegen-verifier.rs deleted file mode 100644 index f991f72b108..00000000000 --- a/noir/noir-repo/tooling/nargo_cli/tests/codegen-verifier.rs +++ /dev/null @@ -1,37 +0,0 @@ -//! This integration test aims to check that the `nargo codegen-verifier` will successfully create a -//! file containing a verifier for a simple program. 
- -use assert_cmd::prelude::*; -use predicates::prelude::*; -use std::process::Command; - -use assert_fs::prelude::{PathAssert, PathChild}; - -#[test] -fn simple_verifier_codegen() { - let test_dir = assert_fs::TempDir::new().unwrap(); - std::env::set_current_dir(&test_dir).unwrap(); - - // Create trivial program - let project_name = "hello_world"; - let project_dir = test_dir.child(project_name); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("new").arg(project_name); - cmd.assert().success(); - - std::env::set_current_dir(&project_dir).unwrap(); - - // Run `nargo codegen-verifier` - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("codegen-verifier"); - cmd.assert() - .success() - .stdout(predicate::str::contains("Contract successfully created and located at")); - - project_dir - .child("contract") - .child("hello_world") - .child("plonk_vk.sol") - .assert(predicate::path::is_file()); -} diff --git a/scripts/logs/download_base_benchmark_from_s3.sh b/scripts/logs/download_base_benchmark_from_s3.sh index dfe5d631ab4..6da6eb2a6bb 100755 --- a/scripts/logs/download_base_benchmark_from_s3.sh +++ b/scripts/logs/download_base_benchmark_from_s3.sh @@ -12,6 +12,8 @@ BASE_BENCHMARK_FILE_JSON="${BENCH_FOLDER}/base-benchmark.json" # If on a pull request, get the data from the most recent commit on master where it's available to generate a comment comparing them if [ -n "${PULL_REQUEST:-}" ]; then MASTER_COMMIT_HASH=$(curl -s "https://api.github.com/repos/AztecProtocol/aztec-packages/pulls/${PULL_REQUEST##*/}" | jq -r '.base.sha') + # master could have diverged since starting this job, refresh history + git fetch --depth 50 origin master MASTER_COMMIT_HASHES=($(git log $MASTER_COMMIT_HASH --format="%H" -n 50)) mkdir -p $BENCH_FOLDER diff --git a/yarn-project/Earthfile b/yarn-project/Earthfile index 8679843e046..7a41726cf28 100644 --- a/yarn-project/Earthfile +++ b/yarn-project/Earthfile @@ -100,12 +100,15 @@ anvil: end-to-end: FROM ubuntu:noble - RUN apt-get update && apt-get install -y wget gnupg \ + # add repository for chromium + RUN apt-get update && apt-get install -y software-properties-common \ + && add-apt-repository ppa:xtradeb/apps -y && apt-get update \ + && apt-get install -y wget gnupg \ && wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - \ && echo "deb [arch=$(dpkg --print-architecture)] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google-chrome.list \ - && apt update && apt install curl nodejs jq google-chrome-stable netcat-openbsd -y \ + && apt update && apt install curl chromium nodejs netcat-openbsd -y \ && rm -rf /var/lib/apt/lists/* - ENV CHROME_BIN="/usr/bin/google-chrome-stable" + ENV CHROME_BIN="/usr/bin/chromium" ENV PATH=/opt/foundry/bin:$PATH COPY +anvil/anvil /opt/foundry/bin/anvil COPY +end-to-end-prod/usr/src /usr/src diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts index eb9a9661466..5cd1bc4d512 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -105,6 +105,15 @@ describe('Archiver', () => { } // Expect logs to correspond to what is set by L2Block.random(...) 
+ const noteEncryptedLogs = await archiver.getLogs(1, 100, LogType.NOTEENCRYPTED); + expect(noteEncryptedLogs.length).toEqual(blockNumbers.length); + + for (const [index, x] of blockNumbers.entries()) { + const expectedTotalNumEncryptedLogs = 4 * x * 2; + const totalNumEncryptedLogs = EncryptedL2BlockL2Logs.unrollLogs([noteEncryptedLogs[index]]).length; + expect(totalNumEncryptedLogs).toEqual(expectedTotalNumEncryptedLogs); + } + const encryptedLogs = await archiver.getLogs(1, 100, LogType.ENCRYPTED); expect(encryptedLogs.length).toEqual(blockNumbers.length); diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index 24069e5c1a0..b2b511b5531 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -265,10 +265,10 @@ export class Archiver implements ArchiveSource { await Promise.all( retrievedBlocks.retrievedData.map(block => { + const noteEncryptedLogs = block.body.noteEncryptedLogs; const encryptedLogs = block.body.encryptedLogs; const unencryptedLogs = block.body.unencryptedLogs; - - return this.store.addLogs(encryptedLogs, unencryptedLogs, block.number); + return this.store.addLogs(noteEncryptedLogs, encryptedLogs, unencryptedLogs, block.number); }), ); diff --git a/yarn-project/archiver/src/archiver/archiver_store.ts b/yarn-project/archiver/src/archiver/archiver_store.ts index 0188575228f..b3d54af1d03 100644 --- a/yarn-project/archiver/src/archiver/archiver_store.ts +++ b/yarn-project/archiver/src/archiver/archiver_store.ts @@ -85,12 +85,14 @@ export interface ArchiverDataStore { /** * Append new logs to the store's list. + * @param noteEncryptedLogs - The note encrypted logs to be added to the store. * @param encryptedLogs - The encrypted logs to be added to the store. * @param unencryptedLogs - The unencrypted logs to be added to the store. * @param blockNumber - The block for which to add the logs. * @returns True if the operation is successful. 
*/ addLogs( + noteEncryptedLogs: EncryptedL2BlockL2Logs | undefined, encryptedLogs: EncryptedL2BlockL2Logs | undefined, unencryptedLogs: UnencryptedL2BlockL2Logs | undefined, blockNumber: number, diff --git a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts index 24ef0228b5d..828567e36e3 100644 --- a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts +++ b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts @@ -122,6 +122,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch it('adds encrypted & unencrypted logs', async () => { await expect( store.addLogs( + blocks.retrievedData[0].body.noteEncryptedLogs, blocks.retrievedData[0].body.encryptedLogs, blocks.retrievedData[0].body.unencryptedLogs, blocks.retrievedData[0].number, @@ -131,23 +132,37 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch }); describe.each([ + ['note_encrypted', LogType.NOTEENCRYPTED], ['encrypted', LogType.ENCRYPTED], ['unencrypted', LogType.UNENCRYPTED], ])('getLogs (%s)', (_, logType) => { beforeEach(async () => { await Promise.all( blocks.retrievedData.map(block => - store.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number), + store.addLogs( + block.body.noteEncryptedLogs, + block.body.encryptedLogs, + block.body.unencryptedLogs, + block.number, + ), ), ); }); it.each(blockTests)('retrieves previously stored logs', async (from, limit, getExpectedBlocks) => { - const expectedLogs = getExpectedBlocks().map(block => - logType === LogType.ENCRYPTED ? block.body.encryptedLogs : block.body.unencryptedLogs, - ); + const expectedLogs = getExpectedBlocks().map(block => { + switch (logType) { + case LogType.ENCRYPTED: + return block.body.encryptedLogs; + case LogType.NOTEENCRYPTED: + return block.body.noteEncryptedLogs; + case LogType.UNENCRYPTED: + default: + return block.body.unencryptedLogs; + } + }); const actualLogs = await store.getLogs(from, limit, logType); - expect(actualLogs).toEqual(expectedLogs); + expect(actualLogs[0].txLogs[0]).toEqual(expectedLogs[0].txLogs[0]); }); }); @@ -155,7 +170,12 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch beforeEach(async () => { await Promise.all( blocks.retrievedData.map(block => - store.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number), + store.addLogs( + block.body.noteEncryptedLogs, + block.body.encryptedLogs, + block.body.unencryptedLogs, + block.number, + ), ), ); await store.addBlocks(blocks); @@ -319,7 +339,12 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch await Promise.all( blocks.retrievedData.map(block => - store.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number), + store.addLogs( + block.body.noteEncryptedLogs, + block.body.encryptedLogs, + block.body.unencryptedLogs, + block.number, + ), ), ); }); diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts index c01b8d2202b..a542e3dcc0b 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts @@ -153,11 +153,12 @@ export class KVArchiverDataStore implements ArchiverDataStore { * @returns True if the operation is successful. 
   */
  addLogs(
+    noteEncryptedLogs: EncryptedL2BlockL2Logs | undefined,
     encryptedLogs: EncryptedL2BlockL2Logs | undefined,
     unencryptedLogs: UnencryptedL2BlockL2Logs | undefined,
     blockNumber: number,
  ): Promise<boolean> {
-    return this.#logStore.addLogs(encryptedLogs, unencryptedLogs, blockNumber);
+    return this.#logStore.addLogs(noteEncryptedLogs, encryptedLogs, unencryptedLogs, blockNumber);
  }
 
  /**
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts
index 305c385d254..6724256d80f 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts
@@ -20,12 +20,14 @@ import { type BlockStore } from './block_store.js';
 * A store for logs
 */
export class LogStore {
+  #noteEncryptedLogs: AztecMap<number, Buffer>;
  #encryptedLogs: AztecMap<number, Buffer>;
  #unencryptedLogs: AztecMap<number, Buffer>;
  #logsMaxPageSize: number;
  #log = createDebugLogger('aztec:archiver:log_store');
 
  constructor(private db: AztecKVStore, private blockStore: BlockStore, logsMaxPageSize: number = 1000) {
+    this.#noteEncryptedLogs = db.openMap('archiver_note_encrypted_logs');
    this.#encryptedLogs = db.openMap('archiver_encrypted_logs');
    this.#unencryptedLogs = db.openMap('archiver_unencrypted_logs');
 
@@ -40,11 +42,16 @@ export class LogStore {
   * @returns True if the operation is successful.
   */
  addLogs(
+    noteEncryptedLogs: EncryptedL2BlockL2Logs | undefined,
    encryptedLogs: EncryptedL2BlockL2Logs | undefined,
    unencryptedLogs: UnencryptedL2BlockL2Logs | undefined,
    blockNumber: number,
  ): Promise<boolean> {
    return this.db.transaction(() => {
+      if (noteEncryptedLogs) {
+        void this.#noteEncryptedLogs.set(blockNumber, noteEncryptedLogs.toBuffer());
+      }
+
      if (encryptedLogs) {
        void this.#encryptedLogs.set(blockNumber, encryptedLogs.toBuffer());
      }
@@ -69,8 +76,18 @@
    limit: number,
    logType: TLogType,
  ): IterableIterator<L2BlockL2Logs<FromLogType<TLogType>>> {
-    const logMap = logType === LogType.ENCRYPTED ? this.#encryptedLogs : this.#unencryptedLogs;
-    const L2BlockL2Logs = logType === LogType.ENCRYPTED ? EncryptedL2BlockL2Logs : UnencryptedL2BlockL2Logs;
+    const logMap = (() => {
+      switch (logType) {
+        case LogType.ENCRYPTED:
+          return this.#encryptedLogs;
+        case LogType.NOTEENCRYPTED:
+          return this.#noteEncryptedLogs;
+        case LogType.UNENCRYPTED:
+        default:
+          return this.#unencryptedLogs;
+      }
+    })();
+    const L2BlockL2Logs = logType === LogType.UNENCRYPTED ? UnencryptedL2BlockL2Logs : EncryptedL2BlockL2Logs;
 
    for (const buffer of logMap.values({ start, limit })) {
      yield L2BlockL2Logs.fromBuffer(buffer) as L2BlockL2Logs<FromLogType<TLogType>>;
    }
diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts
index 7c90a611c2f..51f93c71c86 100644
--- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts
+++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts
@@ -27,7 +27,12 @@ describe('MemoryArchiverStore', () => {
    await archiverStore.addBlocks(blocks);
    await Promise.all(
      blocks.retrievedData.map(block =>
-        archiverStore.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number),
+        archiverStore.addLogs(
+          block.body.noteEncryptedLogs,
+          block.body.encryptedLogs,
+          block.body.unencryptedLogs,
+          block.number,
+        ),
      ),
    );
 
diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
index 5c1292a000e..5f29df0a969 100644
--- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
@@ -48,6 +48,12 @@ export class MemoryArchiverStore implements ArchiverDataStore {
   */
  private txEffects: TxEffect[] = [];
 
+  /**
+   * An array containing all the encrypted logs that have been fetched so far.
+   * Note: Index in the "outer" array equals to (corresponding L2 block's number - INITIAL_L2_BLOCK_NUM).
+   */
+  private noteEncryptedLogsPerBlock: EncryptedL2BlockL2Logs[] = [];
+
  /**
   * An array containing all the encrypted logs that have been fetched so far.
   * Note: Index in the "outer" array equals to (corresponding L2 block's number - INITIAL_L2_BLOCK_NUM).
@@ -185,10 +191,15 @@ export class MemoryArchiverStore implements ArchiverDataStore {
   * @returns True if the operation is successful.
   */
  addLogs(
+    noteEncryptedLogs: EncryptedL2BlockL2Logs,
    encryptedLogs: EncryptedL2BlockL2Logs,
    unencryptedLogs: UnencryptedL2BlockL2Logs,
    blockNumber: number,
  ): Promise<boolean> {
+    if (noteEncryptedLogs) {
+      this.noteEncryptedLogsPerBlock[blockNumber - INITIAL_L2_BLOCK_NUM] = noteEncryptedLogs;
+    }
+
    if (encryptedLogs) {
      this.encryptedLogsPerBlock[blockNumber - INITIAL_L2_BLOCK_NUM] = encryptedLogs;
    }
@@ -308,9 +319,18 @@ export class MemoryArchiverStore implements ArchiverDataStore {
    if (from < INITIAL_L2_BLOCK_NUM || limit < 1) {
      throw new Error(`Invalid limit: ${limit}`);
    }
-    const logs = (
-      logType === LogType.ENCRYPTED ? this.encryptedLogsPerBlock : this.unencryptedLogsPerBlock
-    ) as L2BlockL2Logs<FromLogType<TLogType>>[];
+    const logs = (() => {
+      switch (logType) {
+        case LogType.ENCRYPTED:
+          return this.encryptedLogsPerBlock;
+        case LogType.NOTEENCRYPTED:
+          return this.noteEncryptedLogsPerBlock;
+        case LogType.UNENCRYPTED:
+        default:
+          return this.unencryptedLogsPerBlock;
+      }
+    })() as L2BlockL2Logs<FromLogType<TLogType>>[];
+
    if (from > logs.length) {
      return Promise.resolve([]);
    }
diff --git a/yarn-project/aztec.js/src/index.ts b/yarn-project/aztec.js/src/index.ts
index e89316b4230..ed54657b950 100644
--- a/yarn-project/aztec.js/src/index.ts
+++ b/yarn-project/aztec.js/src/index.ts
@@ -125,6 +125,7 @@ export {
  SiblingPath,
  EncryptedLogHeader,
  EncryptedLogIncomingBody,
+  EncryptedLogOutgoingBody,
 } from '@aztec/circuit-types';
 
 export { NodeInfo } from '@aztec/types/interfaces';
diff --git a/yarn-project/aztec.js/src/wallet/base_wallet.ts b/yarn-project/aztec.js/src/wallet/base_wallet.ts
index 200ad930dee..e94c9920359 100644
--- a/yarn-project/aztec.js/src/wallet/base_wallet.ts
+++ b/yarn-project/aztec.js/src/wallet/base_wallet.ts
@@ -15,7 +15,7 @@ import {
  type TxHash,
  type TxReceipt,
 } from '@aztec/circuit-types';
-import { type AztecAddress, type CompleteAddress, type Fr, type PartialAddress } from '@aztec/circuits.js';
+import { type AztecAddress, type CompleteAddress, type Fq, type Fr, type PartialAddress } from '@aztec/circuits.js';
 import { type ContractArtifact } from '@aztec/foundation/abi';
 import { type ContractClassWithId, type ContractInstanceWithAddress } from '@aztec/types/contracts';
 import { type NodeInfo } from '@aztec/types/interfaces';
@@ -69,6 +69,9 @@ export abstract class BaseWallet implements Wallet {
  registerAccount(secretKey: Fr, partialAddress: PartialAddress): Promise<CompleteAddress> {
    return this.pxe.registerAccount(secretKey, partialAddress);
  }
+  rotateMasterNullifierKey(account: AztecAddress, secretKey: Fq): Promise<void> {
+    return this.pxe.rotateMasterNullifierKey(account, secretKey);
+  }
  registerRecipient(account: CompleteAddress): Promise<void> {
    return this.pxe.registerRecipient(account);
  }
diff --git a/yarn-project/aztec.js/src/wallet/create_recipient.ts b/yarn-project/aztec.js/src/wallet/create_recipient.ts
index 765f0fcc74e..1ec56fb6954 100644
--- a/yarn-project/aztec.js/src/wallet/create_recipient.ts
+++ b/yarn-project/aztec.js/src/wallet/create_recipient.ts
@@ -8,6 +8,8 @@ import { CompleteAddress } from '@aztec/circuits.js';
 */
export async function createRecipient(pxe: PXE): Promise<CompleteAddress> {
  const completeAddress = CompleteAddress.random();
+  // docs:start:register-recipient
  await pxe.registerRecipient(completeAddress);
+  // docs:end:register-recipient
  return completeAddress;
}
diff --git a/yarn-project/bb-prover/src/bb/execute.ts b/yarn-project/bb-prover/src/bb/execute.ts
index d899d2a13a8..60112d96851 100644
--- a/yarn-project/bb-prover/src/bb/execute.ts
+++ b/yarn-project/bb-prover/src/bb/execute.ts
@@ -32,6 +32,12 @@ export type BBFailure = {
 
 export type BBResult = BBSuccess | BBFailure;
 
+type BBExecResult = {
+  status: BB_RESULT;
+  exitCode: number;
+  signal: string | undefined;
+};
+
 /**
  * Invokes the Barretenberg binary with the provided command and args
  * @param pathToBB - The path to the BB binary
@@ -47,26 +53,20 @@ export function executeBB(
  args: string[],
  logger: LogFn,
  resultParser = (code: number) => code === 0,
-) {
-  return new Promise((resolve, reject) => {
+): Promise<BBExecResult> {
+  return new Promise<BBExecResult>(resolve => {
    // spawn the bb process
-    const bb = proc.spawn(pathToBB, [command, ...args]);
-    bb.stdout.on('data', data => {
-      const message = data.toString('utf-8').replace(/\n$/, '');
-      logger(message);
+    const bb = proc.spawn(pathToBB, [command, ...args], {
+      stdio: 'pipe',
    });
-    bb.stderr.on('data', data => {
-      const message = data.toString('utf-8').replace(/\n$/, '');
-      logger(message);
-    });
-    bb.on('close', (code: number) => {
-      if (resultParser(code)) {
-        resolve(BB_RESULT.SUCCESS);
+    bb.on('close', (exitCode: number, signal?: string) => {
+      if (resultParser(exitCode)) {
+        resolve({ status: BB_RESULT.SUCCESS, exitCode, signal });
      } else {
-        reject();
+        resolve({ status: BB_RESULT.FAILURE, exitCode, signal });
      }
    });
-  }).catch(_ => BB_RESULT.FAILURE);
+  }).catch(_ => ({ status: BB_RESULT.FAILURE, exitCode: -1, signal: undefined }));
}
 
const bytecodeHashFilename = 'bytecode_hash';
@@ -154,14 +154,14 @@ export async function generateKeyForNoirCircuit(
    const timer = new Timer();
    let result = await executeBB(pathToBB, `write_${key}`, args, log);
    // If we succeeded and the type of key if verification, have bb write the 'fields' version too
-    if (result == BB_RESULT.SUCCESS && key === 'vk') {
+    if (result.status == BB_RESULT.SUCCESS && key === 'vk') {
      const asFieldsArgs = ['-k', `${outputPath}/${VK_FILENAME}`, '-o', `${outputPath}/${VK_FIELDS_FILENAME}`, '-v'];
      result = await executeBB(pathToBB, `vk_as_fields`, asFieldsArgs, log);
    }
    const duration = timer.ms();
    // Cleanup the bytecode file
    await fs.rm(bytecodePath, { force: true });
-    if (result == BB_RESULT.SUCCESS) {
+    if (result.status == BB_RESULT.SUCCESS) {
      // Store the bytecode hash so we don't need to regenerate at a later time
      await fs.writeFile(bytecodeHashPath, bytecodeHash);
      return {
@@ -173,7 +173,10 @@ export async function generateKeyForNoirCircuit(
      };
    }
    // Not a great error message here but it is difficult to decipher what comes from bb
-    return { status: BB_RESULT.FAILURE, reason: `Failed to generate key` };
+    return {
+      status: BB_RESULT.FAILURE,
+      reason: `Failed to generate key. Exit code: ${result.exitCode}. Signal ${result.signal}.`,
+    };
  } catch (error) {
    return { status: BB_RESULT.FAILURE, reason: `${error}` };
  }
@@ -231,7 +234,7 @@ export async function generateProof(
    const duration = timer.ms();
    // cleanup the bytecode
    await fs.rm(bytecodePath, { force: true });
-    if (result == BB_RESULT.SUCCESS) {
+    if (result.status == BB_RESULT.SUCCESS) {
      return {
        status: BB_RESULT.SUCCESS,
        duration,
@@ -241,7 +244,10 @@ export async function generateProof(
      };
    }
    // Not a great error message here but it is difficult to decipher what comes from bb
-    return { status: BB_RESULT.FAILURE, reason: `Failed to generate proof` };
+    return {
+      status: BB_RESULT.FAILURE,
+      reason: `Failed to generate proof. Exit code ${result.exitCode}. Signal ${result.signal}.`,
+    };
  } catch (error) {
    return { status: BB_RESULT.FAILURE, reason: `${error}` };
  }
@@ -274,11 +280,14 @@ export async function verifyProof(
  const timer = new Timer();
  const result = await executeBB(pathToBB, 'verify', args, log);
  const duration = timer.ms();
-  if (result == BB_RESULT.SUCCESS) {
+  if (result.status == BB_RESULT.SUCCESS) {
    return { status: BB_RESULT.SUCCESS, duration };
  }
  // Not a great error message here but it is difficult to decipher what comes from bb
-  return { status: BB_RESULT.FAILURE, reason: `Failed to verify proof` };
+  return {
+    status: BB_RESULT.FAILURE,
+    reason: `Failed to verify proof. Exit code ${result.exitCode}. Signal ${result.signal}.`,
+  };
 } catch (error) {
   return { status: BB_RESULT.FAILURE, reason: `${error}` };
 }
@@ -311,11 +320,14 @@ export async function writeVkAsFields(
  const timer = new Timer();
  const result = await executeBB(pathToBB, 'vk_as_fields', args, log);
  const duration = timer.ms();
-  if (result == BB_RESULT.SUCCESS) {
+  if (result.status == BB_RESULT.SUCCESS) {
    return { status: BB_RESULT.SUCCESS, duration, vkPath: verificationKeyPath };
  }
  // Not a great error message here but it is difficult to decipher what comes from bb
-  return { status: BB_RESULT.FAILURE, reason: `Failed to create vk as fields` };
+  return {
+    status: BB_RESULT.FAILURE,
+    reason: `Failed to create vk as fields. Exit code ${result.exitCode}. Signal ${result.signal}.`,
+  };
 } catch (error) {
   return { status: BB_RESULT.FAILURE, reason: `${error}` };
 }
@@ -348,11 +360,14 @@ export async function writeProofAsFields(
  const timer = new Timer();
  const result = await executeBB(pathToBB, 'proof_as_fields', args, log);
  const duration = timer.ms();
-  if (result == BB_RESULT.SUCCESS) {
+  if (result.status == BB_RESULT.SUCCESS) {
    return { status: BB_RESULT.SUCCESS, duration, proofPath: proofPath };
  }
  // Not a great error message here but it is difficult to decipher what comes from bb
-  return { status: BB_RESULT.FAILURE, reason: `Failed to create proof as fields` };
+  return {
+    status: BB_RESULT.FAILURE,
+    reason: `Failed to create proof as fields. Exit code ${result.exitCode}. Signal ${result.signal}.`,
+  };
 } catch (error) {
   return { status: BB_RESULT.FAILURE, reason: `${error}` };
 }
diff --git a/yarn-project/bb-prover/src/prover/bb_native_proof_creator.ts b/yarn-project/bb-prover/src/prover/bb_native_proof_creator.ts
index cef282a7064..5e76f59f489 100644
--- a/yarn-project/bb-prover/src/prover/bb_native_proof_creator.ts
+++ b/yarn-project/bb-prover/src/prover/bb_native_proof_creator.ts
@@ -1,4 +1,5 @@
 import { type AppCircuitProofOutput, type KernelProofOutput, type ProofCreator } from '@aztec/circuit-types';
+import { type CircuitProvingStats, type CircuitWitnessGenerationStats } from '@aztec/circuit-types/stats';
 import {
   Fr,
   NESTED_RECURSIVE_PROOF_LENGTH,
@@ -19,6 +20,7 @@ import { siloNoteHash } from '@aztec/circuits.js/hash';
 import { randomBytes } from '@aztec/foundation/crypto';
 import { createDebugLogger } from '@aztec/foundation/log';
 import { type Tuple } from '@aztec/foundation/serialize';
+import { Timer } from '@aztec/foundation/timer';
 import {
   ClientCircuitArtifacts,
   type ClientProtocolArtifact,
@@ -50,6 +52,7 @@ import {
   generateProof,
   verifyProof,
 } from '../bb/execute.js';
+import { mapProtocolArtifactNameToCircuitName } from '../stats.js';
 import {
   AGGREGATION_OBJECT_SIZE,
   CIRCUIT_PUBLIC_INPUTS_INDEX,
@@ -58,28 +61,6 @@ import {
   type VerificationKeyData,
 } from './verification_key_data.js';
 
-type PrivateKernelProvingOps = {
-  convertOutputs: (outputs: WitnessMap) => PrivateKernelCircuitPublicInputs | PrivateKernelTailCircuitPublicInputs;
-};
-
-const PrivateKernelArtifactMapping: Record<ClientProtocolArtifact, PrivateKernelProvingOps> = {
-  PrivateKernelInitArtifact: {
-    convertOutputs: convertPrivateKernelInitOutputsFromWitnessMap,
-  },
-  PrivateKernelInnerArtifact: {
-    convertOutputs: convertPrivateKernelInnerOutputsFromWitnessMap,
-  },
-  PrivateKernelTailArtifact: {
-    convertOutputs: convertPrivateKernelTailOutputsFromWitnessMap,
-  },
-  PrivateKernelResetArtifact: {
-    convertOutputs: convertPrivateKernelResetOutputsFromWitnessMap,
-  },
-  PrivateKernelTailToPublicArtifact: {
-    convertOutputs: convertPrivateKernelTailForPublicOutputsFromWitnessMap,
-  },
-};
-
 /**
  * This proof creator implementation uses the native bb binary.
  * This is a temporary implementation until we make the WASM version work.
@@ -109,45 +90,66 @@ export class BBNativeProofCreator implements ProofCreator {
  public async createProofInit(
    inputs: PrivateKernelInitCircuitPrivateInputs,
  ): Promise<KernelProofOutput<PrivateKernelCircuitPublicInputs>> {
-    const witnessMap = convertPrivateKernelInitInputsToWitnessMap(inputs);
-    return await this.createSafeProof(witnessMap, 'PrivateKernelInitArtifact');
+    return await this.createSafeProof(
+      inputs,
+      'PrivateKernelInitArtifact',
+      convertPrivateKernelInitInputsToWitnessMap,
+      convertPrivateKernelInitOutputsFromWitnessMap,
+    );
  }
 
  public async createProofInner(
    inputs: PrivateKernelInnerCircuitPrivateInputs,
  ): Promise<KernelProofOutput<PrivateKernelCircuitPublicInputs>> {
-    const witnessMap = convertPrivateKernelInnerInputsToWitnessMap(inputs);
-    return await this.createSafeProof(witnessMap, 'PrivateKernelInnerArtifact');
+    return await this.createSafeProof(
+      inputs,
+      'PrivateKernelInnerArtifact',
+      convertPrivateKernelInnerInputsToWitnessMap,
+      convertPrivateKernelInnerOutputsFromWitnessMap,
+    );
  }
 
  public async createProofReset(
    inputs: PrivateKernelResetCircuitPrivateInputs,
  ): Promise<KernelProofOutput<PrivateKernelCircuitPublicInputs>> {
-    const witnessMap = convertPrivateKernelResetInputsToWitnessMap(inputs);
-    return await this.createSafeProof(witnessMap, 'PrivateKernelResetArtifact');
+    return await this.createSafeProof(
+      inputs,
+      'PrivateKernelResetArtifact',
+      convertPrivateKernelResetInputsToWitnessMap,
+      convertPrivateKernelResetOutputsFromWitnessMap,
+    );
  }
 
  public async createProofTail(
    inputs: PrivateKernelTailCircuitPrivateInputs,
  ): Promise<KernelProofOutput<PrivateKernelTailCircuitPublicInputs>> {
    if (!inputs.isForPublic()) {
-      const witnessMap = convertPrivateKernelTailInputsToWitnessMap(inputs);
-      return await this.createSafeProof(witnessMap, 'PrivateKernelTailArtifact');
+      return await this.createSafeProof(
+        inputs,
+        'PrivateKernelTailArtifact',
+        convertPrivateKernelTailInputsToWitnessMap,
+        convertPrivateKernelTailOutputsFromWitnessMap,
+      );
    }
-    const witnessMap = convertPrivateKernelTailToPublicInputsToWitnessMap(inputs);
-    return await this.createSafeProof(witnessMap, 'PrivateKernelTailToPublicArtifact');
+    return await this.createSafeProof(
+      inputs,
+      'PrivateKernelTailToPublicArtifact',
+      convertPrivateKernelTailToPublicInputsToWitnessMap,
+      convertPrivateKernelTailForPublicOutputsFromWitnessMap,
+    );
  }
 
  public async createAppCircuitProof(
    partialWitness: Map<number, string>,
    bytecode: Buffer,
+    appCircuitName?: string,
  ): Promise<AppCircuitProofOutput> {
    const directory = `${this.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`;
    await fs.mkdir(directory, { recursive: true });
    this.log.debug(`Created directory: ${directory}`);
    try {
      this.log.debug(`Proving app circuit`);
-      const proofOutput = await this.createProof(directory, partialWitness, bytecode, 'App');
+      const proofOutput = await this.createProof(directory, partialWitness, bytecode, 'App', 0, 0, appCircuitName);
      if (proofOutput.proof.proof.length != RECURSIVE_PROOF_LENGTH) {
        throw new Error(`Incorrect proof length`);
      }
@@ -276,48 +278,66 @@ export class BBNativeProofCreator implements ProofCreator {
    return await promise;
  }
 
-  private async createSafeProof<T extends PrivateKernelCircuitPublicInputs | PrivateKernelTailCircuitPublicInputs>(
-    inputs: WitnessMap,
+  private async createSafeProof<I extends { toBuffer: () => Buffer }, O extends { toBuffer: () => Buffer }>(
+    inputs: I,
    circuitType: ClientProtocolArtifact,
-  ): Promise<KernelProofOutput<T>> {
+    convertInputs: (inputs: I) => WitnessMap,
+    convertOutputs: (outputs: WitnessMap) => O,
+  ): Promise<KernelProofOutput<O>> {
    const directory = `${this.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`;
    await fs.mkdir(directory, { recursive: true });
    this.log.debug(`Created directory: ${directory}`);
    try {
-      return await this.generateWitnessAndCreateProof(inputs, circuitType, directory);
+      return await this.generateWitnessAndCreateProof(inputs, circuitType, directory, convertInputs, convertOutputs);
    } finally {
      await fs.rm(directory, { recursive: true, force: true });
      this.log.debug(`Deleted directory: ${directory}`);
    }
  }
 
-  private async generateWitnessAndCreateProof<T>(
-    inputs: WitnessMap,
+  private async generateWitnessAndCreateProof<
+    I extends { toBuffer: () => Buffer },
+    O extends { toBuffer: () => Buffer },
+  >(
+    inputs: I,
    circuitType: ClientProtocolArtifact,
    directory: string,
-  ): Promise<KernelProofOutput<T>> {
+    convertInputs: (inputs: I) => WitnessMap,
+    convertOutputs: (outputs: WitnessMap) => O,
+  ): Promise<KernelProofOutput<O>> {
    this.log.debug(`Generating witness for ${circuitType}`);
    const compiledCircuit: NoirCompiledCircuit = ClientCircuitArtifacts[circuitType];
 
-    const outputWitness = await this.simulator.simulateCircuit(inputs, compiledCircuit);
-
-    this.log.debug(`Generated witness for ${circuitType}`);
-
-    const publicInputs = PrivateKernelArtifactMapping[circuitType].convertOutputs(outputWitness) as T;
+    const witnessMap = convertInputs(inputs);
+    const timer = new Timer();
+    const outputWitness = await this.simulator.simulateCircuit(witnessMap, compiledCircuit);
+    const output = convertOutputs(outputWitness);
+
+    const inputSize = inputs.toBuffer().length;
+    const outputSize = output.toBuffer().length;
+    this.log.debug(`Generated witness for ${circuitType}`, {
+      eventName: 'circuit-witness-generation',
+      circuitName: mapProtocolArtifactNameToCircuitName(circuitType),
+      duration: timer.ms(),
+      inputSize,
+      outputSize,
+    } satisfies CircuitWitnessGenerationStats);
 
    const proofOutput = await this.createProof(
      directory,
      outputWitness,
      Buffer.from(compiledCircuit.bytecode, 'base64'),
      circuitType,
+      inputSize,
+      outputSize,
    );
    if (proofOutput.proof.proof.length != NESTED_RECURSIVE_PROOF_LENGTH) {
      throw new Error(`Incorrect proof length`);
    }
    const nestedProof = proofOutput.proof as RecursiveProof<typeof NESTED_RECURSIVE_PROOF_LENGTH>;
 
-    const kernelOutput: KernelProofOutput<T> = {
-      publicInputs,
+    const kernelOutput: KernelProofOutput<O> = {
+      publicInputs: output,
      proof: nestedProof,
      verificationKey: proofOutput.verificationKey,
    };
@@ -329,6 +349,9 @@
    partialWitness: WitnessMap,
    bytecode: Buffer,
    circuitType: ClientProtocolArtifact | 'App',
+    inputSize: number,
+    outputSize: number,
+    appCircuitName?: string,
  ): Promise<{
    proof: RecursiveProof<typeof RECURSIVE_PROOF_LENGTH> | RecursiveProof<typeof NESTED_RECURSIVE_PROOF_LENGTH>;
    verificationKey: VerificationKeyAsFields;
@@ -358,11 +381,36 @@
    if (circuitType === 'App') {
      const vkData = await this.convertVk(directory);
      const proof = await this.readProofAsFields(directory, circuitType, vkData);
+
+      this.log.debug(`Generated proof`, {
+        eventName: 'circuit-proving',
+        circuitName: 'app-circuit',
+        duration: provingResult.duration,
+        inputSize,
+        outputSize,
+        proofSize: proof.binaryProof.buffer.length,
+        appCircuitName,
+        circuitSize: vkData.circuitSize,
+        numPublicInputs: vkData.numPublicInputs,
+      } as CircuitProvingStats);
+
      return { proof, verificationKey: new VerificationKeyAsFields(vkData.keyAsFields, vkData.hash) };
    }
 
    const vkData = await this.updateVerificationKeyAfterProof(directory, circuitType);
    const proof = await this.readProofAsFields(directory, circuitType, vkData);
+
+    this.log.debug(`Generated proof`, {
+      circuitName: mapProtocolArtifactNameToCircuitName(circuitType),
+      duration: provingResult.duration,
+      eventName: 'circuit-proving',
+      inputSize,
+      outputSize,
+      proofSize: proof.binaryProof.buffer.length,
+      circuitSize: vkData.circuitSize,
+      numPublicInputs: vkData.numPublicInputs,
+    } as CircuitProvingStats);
+
    return { proof, verificationKey: new VerificationKeyAsFields(vkData.keyAsFields, vkData.hash) };
  }
diff --git a/yarn-project/bb-prover/src/prover/bb_prover.ts b/yarn-project/bb-prover/src/prover/bb_prover.ts
index bca2596b015..40c65c829d0 100644
--- a/yarn-project/bb-prover/src/prover/bb_prover.ts
+++ b/yarn-project/bb-prover/src/prover/bb_prover.ts
@@ -7,6 +7,7 @@ import {
  type ServerCircuitProver,
  makePublicInputsAndProof,
 } from '@aztec/circuit-types';
+import { type CircuitProvingStats, type CircuitWitnessGenerationStats } from '@aztec/circuit-types/stats';
 import {
  type BaseOrMergeRollupPublicInputs,
  type BaseParityInputs,
@@ -14,12 +15,11 @@ import {
  Fr,
  type KernelCircuitPublicInputs,
  type MergeRollupInputs,
-  type NESTED_RECURSIVE_PROOF_LENGTH,
-  type ParityPublicInputs,
+  NESTED_RECURSIVE_PROOF_LENGTH,
  type PreviousRollupData,
  Proof,
  type PublicKernelCircuitPublicInputs,
-  type RECURSIVE_PROOF_LENGTH,
+  RECURSIVE_PROOF_LENGTH,
  RecursiveProof,
  RollupTypes,
  RootParityInput,
@@ -65,7 +65,7 @@ import {
  verifyProof,
 } from '../bb/execute.js';
 import { PublicKernelArtifactMapping } from '../mappings/mappings.js';
-import { circuitTypeToCircuitName, emitCircuitProvingStats, emitCircuitWitnessGenerationStats } from '../stats.js';
+import { mapProtocolArtifactNameToCircuitName } from '../stats.js';
 import {
  AGGREGATION_OBJECT_SIZE,
  CIRCUIT_PUBLIC_INPUTS_INDEX,
@@ -114,11 +114,11 @@ export class BBNativeRollupProver implements ServerCircuitProver {
   * @returns The public inputs of the parity circuit.
   */
  public async getBaseParityProof(inputs: BaseParityInputs): Promise<RootParityInput<typeof RECURSIVE_PROOF_LENGTH>> {
-    const witnessMap = convertBaseParityInputsToWitnessMap(inputs);
-
-    const [circuitOutput, proof] = await this.createRecursiveProof(
-      witnessMap,
+    const [circuitOutput, proof] = await this.createRecursiveProof(
+      inputs,
      'BaseParityArtifact',
+      RECURSIVE_PROOF_LENGTH,
+      convertBaseParityInputsToWitnessMap,
      convertBaseParityOutputsFromWitnessMap,
    );
@@ -137,12 +137,13 @@
  public async getRootParityProof(
    inputs: RootParityInputs,
  ): Promise<RootParityInput<typeof NESTED_RECURSIVE_PROOF_LENGTH>> {
-    const witnessMap = convertRootParityInputsToWitnessMap(inputs);
-
-    const [circuitOutput, proof] = await this.createRecursiveProof<
-      typeof NESTED_RECURSIVE_PROOF_LENGTH,
-      ParityPublicInputs
-    >(witnessMap, 'RootParityArtifact', convertRootParityOutputsFromWitnessMap);
+    const [circuitOutput, proof] = await this.createRecursiveProof(
+      inputs,
+      'RootParityArtifact',
+      NESTED_RECURSIVE_PROOF_LENGTH,
+      convertRootParityInputsToWitnessMap,
+      convertRootParityOutputsFromWitnessMap,
+    );
 
    const verificationKey = await this.getVerificationKeyDataForCircuit('RootParityArtifact');
 
@@ -163,11 +164,13 @@
    if (kernelOps === undefined) {
      throw new Error(`Unable to prove kernel type ${PublicKernelType[kernelRequest.type]}`);
    }
-    const witnessMap = kernelOps.convertInputs(kernelRequest.inputs);
-
-    const [outputWitness, proof] = await this.createProof(witnessMap, kernelOps.artifact);
+    const [result, proof] = await this.createProof(
+      kernelRequest.inputs,
+      kernelOps.artifact,
+      kernelOps.convertInputs,
+      kernelOps.convertOutputs,
+    );
 
-    const result = kernelOps.convertOutputs(outputWitness);
    return makePublicInputsAndProof(result, proof);
  }
 
@@ -179,11 +182,13 @@
  public async getPublicTailProof(
    kernelRequest: PublicKernelTailRequest,
  ): Promise<PublicInputsAndProof<KernelCircuitPublicInputs>> {
-    const witnessMap = convertPublicTailInputsToWitnessMap(kernelRequest.inputs);
-
-    const [outputWitness, proof] = await this.createProof(witnessMap, 'PublicKernelTailArtifact');
+    const [result, proof] = await this.createProof(
+      kernelRequest.inputs,
+      'PublicKernelTailArtifact',
+      convertPublicTailInputsToWitnessMap,
+      convertPublicTailOutputFromWitnessMap,
+    );
 
-    const result = convertPublicTailOutputFromWitnessMap(outputWitness);
    return makePublicInputsAndProof(result, proof);
  }
 
@@ -195,11 +200,12 @@
  public async getBaseRollupProof(
    input: BaseRollupInputs,
  ): Promise<PublicInputsAndProof<BaseOrMergeRollupPublicInputs>> {
-    const witnessMap = convertBaseRollupInputsToWitnessMap(input);
-
-    const [outputWitness, proof] = await this.createProof(witnessMap, 'BaseRollupArtifact');
-
-    const result = convertBaseRollupOutputsFromWitnessMap(outputWitness);
+    const [result, proof] = await this.createProof(
+      input,
+      'BaseRollupArtifact',
+      convertBaseRollupInputsToWitnessMap,
+      convertBaseRollupOutputsFromWitnessMap,
+    );
 
    return makePublicInputsAndProof(result, proof);
  }
 
@@ -214,11 +220,12 @@
    // verify both inputs
    await Promise.all(input.previousRollupData.map(prev => this.verifyPreviousRollupProof(prev)));
 
-    const witnessMap = convertMergeRollupInputsToWitnessMap(input);
-
-    const [outputWitness, proof] = await this.createProof(witnessMap, 'MergeRollupArtifact');
-
-    const result = convertMergeRollupOutputsFromWitnessMap(outputWitness);
+    const [result, proof] = await this.createProof(
+      input,
+      'MergeRollupArtifact',
+      convertMergeRollupInputsToWitnessMap,
+      convertMergeRollupOutputsFromWitnessMap,
+    );
 
    return makePublicInputsAndProof(result, proof);
  }
 
@@ -232,18 +239,25 @@
    // verify the inputs
    await Promise.all(input.previousRollupData.map(prev => this.verifyPreviousRollupProof(prev)));
 
-    const witnessMap = convertRootRollupInputsToWitnessMap(input);
-
-    const [outputWitness, proof] = await this.createProof(witnessMap, 'RootRollupArtifact');
+    const [result, proof] = await this.createProof(
+      input,
+      'RootRollupArtifact',
+      convertRootRollupInputsToWitnessMap,
+      convertRootRollupOutputsFromWitnessMap,
+    );
 
    await this.verifyProof('RootRollupArtifact', proof);
 
-    const result = convertRootRollupOutputsFromWitnessMap(outputWitness);
    return makePublicInputsAndProof(result, proof);
  }
 
  // TODO(@PhilWindle): Delete when no longer required
-  public async createProof(witnessMap: WitnessMap, circuitType: ServerProtocolArtifact): Promise<[WitnessMap, Proof]> {
+  public async createProof<Input extends { toBuffer: () => Buffer }, Output extends { toBuffer: () => Buffer }>(
+    input: Input,
+    circuitType: ServerProtocolArtifact,
+    convertInput: (input: Input) => WitnessMap,
+    convertOutput: (outputWitness: WitnessMap) => Output,
+  ): Promise<[Output, Proof]> {
    // Create random directory to be used for temp files
    const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`;
    await fs.mkdir(bbWorkingDirectory, { recursive: true });
@@ -265,15 +279,16 @@
    logger.debug(`Generating witness data for ${circuitType}`);
 
+    const witnessMap = convertInput(input);
    const timer = new Timer();
    const outputWitness = await simulator.simulateCircuit(witnessMap, artifact);
-    emitCircuitWitnessGenerationStats(
-      circuitTypeToCircuitName(circuitType),
-      timer.ms(),
-      witnessMap.size * Fr.SIZE_IN_BYTES,
-      outputWitness.size * Fr.SIZE_IN_BYTES,
-      logger,
-    );
+    logger.debug(`Generated witness`, {
+      circuitName: mapProtocolArtifactNameToCircuitName(circuitType),
+      duration: timer.ms(),
+      inputSize: witnessMap.size * Fr.SIZE_IN_BYTES,
+      outputSize: outputWitness.size * Fr.SIZE_IN_BYTES,
+      eventName: 'circuit-witness-generation',
+    } satisfies CircuitWitnessGenerationStats);
 
    // Now prove the circuit from the generated witness
    logger.debug(`Proving ${circuitType}...`);
@@ -293,38 +308,51 @@
    }
 
    // Ensure our vk cache is up to date
-    await this.updateVerificationKeyAfterProof(provingResult.vkPath!, circuitType);
+    const vkData = await this.updateVerificationKeyAfterProof(provingResult.vkPath!, circuitType);
 
    // Read the proof and then cleanup up our temporary directory
-    const proof = await fs.readFile(`${provingResult.proofPath!}/${PROOF_FILENAME}`);
-
-    // does not include reading the proof from disk above because duration comes from the bb wrapper
-    emitCircuitProvingStats(
-      circuitTypeToCircuitName(circuitType),
-      provingResult.duration,
-      witnessMap.size * Fr.SIZE_IN_BYTES,
-      outputWitness.size * Fr.SIZE_IN_BYTES,
-      proof.length,
-      logger,
-    );
+    const rawProof = await fs.readFile(`${provingResult.proofPath!}/${PROOF_FILENAME}`);
 
    await fs.rm(bbWorkingDirectory, { recursive: true, force: true });
 
-    logger.info(`Generated proof for ${circuitType} in ${provingResult.duration} ms, size: ${proof.length} fields`);
+    const output = convertOutput(outputWitness);
+    const proof = new Proof(rawProof);
+    logger.info(
+      `Generated proof for ${circuitType} in ${provingResult.duration} ms, size: ${proof.buffer.length} fields`,
+      {
+        circuitName: mapProtocolArtifactNameToCircuitName(circuitType),
+        // does not include reading the proof from disk
+        duration: provingResult.duration,
+        proofSize: proof.buffer.length,
+        eventName: 'circuit-proving',
+        inputSize: input.toBuffer().length,
+        outputSize: output.toBuffer().length,
+        circuitSize: vkData.circuitSize,
+        numPublicInputs: vkData.numPublicInputs,
+      } satisfies CircuitProvingStats,
+    );
 
-    return [outputWitness, new Proof(proof)];
+    return [output, proof];
  }
 
  /**
   * Executes a circuit and returns it's outputs and corresponding proof with embedded aggregation object
   * @param witnessMap - The input witness
   * @param circuitType - The type of circuit to be executed
+   * @param proofLength - The length of the proof to be generated. This is a dummy parameter to aid in type checking
+   * @param convertInput - Function for mapping the input object to a witness map.
   * @param convertOutput - Function for parsing the output witness to it's corresponding object
   * @returns The circuits output object and it's proof
   */
-  public async createRecursiveProof<PROOF_LENGTH extends number, CircuitOutputType>(
-    witnessMap: WitnessMap,
+  public async createRecursiveProof<
+    PROOF_LENGTH extends number,
+    CircuitInputType extends { toBuffer: () => Buffer },
+    CircuitOutputType extends { toBuffer: () => Buffer },
+  >(
+    input: CircuitInputType,
    circuitType: ServerProtocolArtifact,
+    proofLength: PROOF_LENGTH,
+    convertInput: (input: CircuitInputType) => WitnessMap,
    convertOutput: (outputWitness: WitnessMap) => CircuitOutputType,
  ): Promise<[CircuitOutputType, RecursiveProof<PROOF_LENGTH>]> {
    // Create random directory to be used for temp files
@@ -350,17 +378,20 @@
    logger.debug(`Generating witness data for ${circuitType}`);
 
    const timer = new Timer();
+    const witnessMap = convertInput(input);
    const outputWitness = await simulator.simulateCircuit(witnessMap, artifact);
-    emitCircuitWitnessGenerationStats(
-      circuitTypeToCircuitName(circuitType),
-      timer.ms(),
-      witnessMap.size * Fr.SIZE_IN_BYTES,
-      outputWitness.size * Fr.SIZE_IN_BYTES,
-      logger,
-    );
+    const output = convertOutput(outputWitness);
 
-    const outputType = convertOutput(outputWitness);
+    const inputSize = input.toBuffer().length;
+    const outputSize = output.toBuffer().length;
+    logger.debug(`Generated witness`, {
+      circuitName: mapProtocolArtifactNameToCircuitName(circuitType),
+      duration: timer.ms(),
+      inputSize,
+      outputSize,
+      eventName: 'circuit-witness-generation',
+    } satisfies CircuitWitnessGenerationStats);
 
    // Now prove the circuit from the generated witness
    logger.debug(`Proving ${circuitType}...`);
@@ -380,25 +411,26 @@
    }
 
    // Ensure our vk cache is up to date
-    await this.updateVerificationKeyAfterProof(provingResult.vkPath!, circuitType);
+    const vkData = await this.updateVerificationKeyAfterProof(provingResult.vkPath!, circuitType);
 
    // Read the proof and then cleanup up our temporary directory
-    const proof = await this.readProofAsFields(provingResult.proofPath!, circuitType);
+    const proof = await this.readProofAsFields(provingResult.proofPath!, circuitType, proofLength);
 
    logger.info(
      `Generated proof for ${circuitType} in ${provingResult.duration} ms, size: ${proof.proof.length} fields`,
+      {
+        circuitName: mapProtocolArtifactNameToCircuitName(circuitType),
+        circuitSize: vkData.circuitSize,
provingResult.duration, + inputSize, + outputSize, + proofSize: proof.binaryProof.buffer.length, + eventName: 'circuit-proving', + numPublicInputs: vkData.numPublicInputs, + } satisfies CircuitProvingStats, ); - emitCircuitProvingStats( - circuitTypeToCircuitName(circuitType), - provingResult.duration, - witnessMap.size * Fr.SIZE_IN_BYTES, - outputWitness.size * Fr.SIZE_IN_BYTES, - proof.binaryProof.buffer.length, - logger, - ); - - return [outputType, proof]; + return [output, proof]; } finally { await fs.rm(bbWorkingDirectory, { recursive: true, force: true }); } @@ -515,13 +547,16 @@ export class BBNativeRollupProver implements ServerCircuitProver { * @param filePath - The directory containing the verification key data files * @param circuitType - The type of circuit to which the verification key corresponds */ - private async updateVerificationKeyAfterProof(filePath: string, circuitType: ServerProtocolArtifact) { + private async updateVerificationKeyAfterProof( + filePath: string, + circuitType: ServerProtocolArtifact, + ): Promise<VerificationKeyData> { let promise = this.verificationKeys.get(circuitType); if (!promise) { promise = this.convertVk(filePath); this.verificationKeys.set(circuitType, promise); } - await promise; + return promise; } /** @@ -533,6 +568,7 @@ export class BBNativeRollupProver implements ServerCircuitProver { private async readProofAsFields<PROOF_LENGTH extends number>( filePath: string, circuitType: ServerProtocolArtifact, + proofLength: PROOF_LENGTH, ): Promise<RecursiveProof<PROOF_LENGTH>> { const [binaryProof, proofString] = await Promise.all([ fs.readFile(`${filePath}/${PROOF_FILENAME}`), @@ -552,6 +588,10 @@ export class BBNativeRollupProver implements ServerCircuitProver { `Circuit type: ${circuitType}, complete proof length: ${fields.length}, without public inputs: ${fieldsWithoutPublicInputs.length}, num public inputs: ${numPublicInputs}, circuit size: ${vkData.circuitSize}, is recursive: ${vkData.isRecursive}, raw length: ${binaryProof.length}`, ); const proof = new RecursiveProof(fieldsWithoutPublicInputs, new Proof(binaryProof)); + if (proof.proof.length !== proofLength) { + throw new Error("Proof length doesn't match expected length"); + } + return proof; } } diff --git a/yarn-project/bb-prover/src/stats.ts b/yarn-project/bb-prover/src/stats.ts index fbd68bde90b..9627d0db8f9 100644 --- a/yarn-project/bb-prover/src/stats.ts +++ b/yarn-project/bb-prover/src/stats.ts @@ -1,45 +1,6 @@ import { type PublicKernelRequest, PublicKernelType } from '@aztec/circuit-types'; -import type { CircuitName, CircuitProvingStats, CircuitWitnessGenerationStats } from '@aztec/circuit-types/stats'; -import { type Logger } from '@aztec/foundation/log'; -import { type ServerProtocolArtifact } from '@aztec/noir-protocol-circuits-types'; - -export function emitCircuitWitnessGenerationStats( - circuitName: CircuitName, - duration: number, - inputSize: number, - outputSize: number, - logger: Logger, -) { - const stats: CircuitWitnessGenerationStats = { - eventName: 'circuit-witness-generation', - circuitName, - inputSize, - outputSize, - duration, - }; - - logger.debug('Circuit witness generation stats', stats); -} - -export function emitCircuitProvingStats( - circuitName: CircuitName, - duration: number, - inputSize: number, - outputSize: number, - proofSize: number, - logger: Logger, -) { - const stats: CircuitProvingStats = { - eventName: 'circuit-proving', - circuitName, - duration, - inputSize, - outputSize, - proofSize, - }; - - logger.debug('Circuit proving stats', stats); -} +import type { CircuitName } from '@aztec/circuit-types/stats'; +import 
{ type ClientProtocolArtifact, type ServerProtocolArtifact } from '@aztec/noir-protocol-circuits-types'; export function mapPublicKernelToCircuitName(kernelType: PublicKernelRequest['type']): CircuitName { switch (kernelType) { @@ -56,8 +17,10 @@ export function mapPublicKernelToCircuitName(kernelType: PublicKernelRequest['ty } } -export function circuitTypeToCircuitName(circuitType: ServerProtocolArtifact): CircuitName { - switch (circuitType) { +export function mapProtocolArtifactNameToCircuitName( + artifact: ServerProtocolArtifact | ClientProtocolArtifact, +): CircuitName { + switch (artifact) { case 'BaseParityArtifact': return 'base-parity'; case 'RootParityArtifact': @@ -76,7 +39,17 @@ export function circuitTypeToCircuitName(circuitType: ServerProtocolArtifact): C return 'public-kernel-teardown'; case 'PublicKernelTailArtifact': return 'public-kernel-tail'; + case 'PrivateKernelInitArtifact': + return 'private-kernel-init'; + case 'PrivateKernelInnerArtifact': + return 'private-kernel-inner'; + case 'PrivateKernelTailArtifact': + return 'private-kernel-tail'; + case 'PrivateKernelTailToPublicArtifact': + return 'private-kernel-tail-to-public'; + case 'PrivateKernelResetArtifact': + return 'private-kernel-reset'; default: - throw new Error(`Unknown circuit type: ${circuitType}`); + throw new Error(`Unknown circuit type: ${artifact}`); } } diff --git a/yarn-project/circuit-types/src/body.ts b/yarn-project/circuit-types/src/body.ts index 6bb1146c395..967076a8247 100644 --- a/yarn-project/circuit-types/src/body.ts +++ b/yarn-project/circuit-types/src/body.ts @@ -76,6 +76,12 @@ export class Body { return computeRoot(leaves); } + get noteEncryptedLogs(): EncryptedL2BlockL2Logs { + const logs = this.txEffects.map(txEffect => txEffect.noteEncryptedLogs); + + return new EncryptedL2BlockL2Logs(logs); + } + get encryptedLogs(): EncryptedL2BlockL2Logs { const logs = this.txEffects.map(txEffect => txEffect.encryptedLogs); diff --git a/yarn-project/circuit-types/src/interfaces/proof_creator.ts b/yarn-project/circuit-types/src/interfaces/proof_creator.ts index 9c66dcbdcb1..7a3ef4042dc 100644 --- a/yarn-project/circuit-types/src/interfaces/proof_creator.ts +++ b/yarn-project/circuit-types/src/interfaces/proof_creator.ts @@ -103,7 +103,12 @@ export interface ProofCreator { * * @param partialWitness - The witness produced via circuit simulation * @param bytecode - The circuit bytecode in gzipped bincode format + * @param appCircuitName - Optionally specify the name of the app circuit * @returns A Promise resolving to a Proof object */ - createAppCircuitProof(partialWitness: WitnessMap, bytecode: Buffer): Promise; + createAppCircuitProof( + partialWitness: WitnessMap, + bytecode: Buffer, + appCircuitName?: string, + ): Promise; } diff --git a/yarn-project/circuit-types/src/interfaces/pxe.ts b/yarn-project/circuit-types/src/interfaces/pxe.ts index 9e01820e4f7..0ad069a0ceb 100644 --- a/yarn-project/circuit-types/src/interfaces/pxe.ts +++ b/yarn-project/circuit-types/src/interfaces/pxe.ts @@ -1,4 +1,4 @@ -import { type AztecAddress, type CompleteAddress, type Fr, type PartialAddress } from '@aztec/circuits.js'; +import { type AztecAddress, type CompleteAddress, type Fq, type Fr, type PartialAddress } from '@aztec/circuits.js'; import { type ContractArtifact } from '@aztec/foundation/abi'; import { type ContractClassWithId, type ContractInstanceWithAddress } from '@aztec/types/contracts'; import { type NodeInfo } from '@aztec/types/interfaces'; @@ -61,6 +61,8 @@ export interface PXE { */ 
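To illustrate how the key-rotation entry point added to this interface just below might be driven, here is a minimal sketch. It assumes the `PXE` interface is exported from the package root and that `GrumpkinScalar` (an alias of `Fq`, matching the key types used in the tests elsewhere in this change) provides `random()`; the wallet-side wrapper itself is hypothetical:

```typescript
import { type PXE } from '@aztec/circuit-types';
import { type AztecAddress, GrumpkinScalar } from '@aztec/circuits.js';

// Rotate the master nullifier secret for an account this PXE controls.
// A real wallet would derive and persist the new secret rather than
// generating and discarding a random one.
async function rotateNullifierSecret(pxe: PXE, account: AztecAddress): Promise<void> {
  const freshSecret = GrumpkinScalar.random(); // fresh Grumpkin scalar (Fq)
  await pxe.rotateMasterNullifierKey(account, freshSecret);
}
```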
registerAccount(secretKey: Fr, partialAddress: PartialAddress): Promise; + rotateMasterNullifierKey(account: AztecAddress, secretKey: Fq): Promise; + /** * Registers a recipient in PXE. This is required when sending encrypted notes to * a user who hasn't deployed their account contract yet. Since their account is not deployed, their diff --git a/yarn-project/circuit-types/src/keys/key_store.ts b/yarn-project/circuit-types/src/keys/key_store.ts index 256db6e36a9..1dbeeb7831c 100644 --- a/yarn-project/circuit-types/src/keys/key_store.ts +++ b/yarn-project/circuit-types/src/keys/key_store.ts @@ -1,6 +1,7 @@ import { type AztecAddress, type CompleteAddress, + type Fq, type Fr, type GrumpkinPrivateKey, type PartialAddress, @@ -32,12 +33,12 @@ export interface KeyStore { getAccounts(): Promise; /** - * Gets the master nullifier public key for a given account or master nullifier public key hash. - * @throws If the account does not exist in the key store. - * @param accountOrNpkMHash - account address or master nullifier public key hash. + * Gets the master nullifier public key for a given master nullifier public key hash. + * @throws If the account corresponding to the master nullifier public key hash does not exist in the key store. + * @param npkMHash - The master nullifier public key hash. * @returns The master nullifier public key for the account. */ - getMasterNullifierPublicKey(accountOrNpkMHash: AztecAddress | Fr): Promise; + getMasterNullifierPublicKey(npkMHash: Fr): Promise; /** * Gets the master incoming viewing public key for a given account. @@ -64,13 +65,13 @@ export interface KeyStore { getMasterTaggingPublicKey(account: AztecAddress): Promise; /** - * Derives and returns the application nullifier secret key for a given account or master nullifier public key hash. - * @throws If the account does not exist in the key store. - * @param accountOrNpkMHash - account address or master nullifier public key hash. + * Derives and returns the application nullifier secret key for a given master nullifier public key hash. + * @throws If the account corresponding to the master nullifier public key hash does not exist in the key store. + * @param npkMHash - The master nullifier public key hash. * @param app - The application address to retrieve the nullifier secret key for. * @returns A Promise that resolves to the application nullifier secret key. */ - getAppNullifierSecretKey(accountOrNpkMHash: AztecAddress | Fr, app: AztecAddress): Promise; + getAppNullifierSecretKey(npkMHash: Fr, app: AztecAddress): Promise; /** * Retrieves application incoming viewing secret key. @@ -117,4 +118,6 @@ export interface KeyStore { * @returns A Promise that resolves to the public keys hash. 
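Returning to the hash-keyed lookups above: once keys can be rotated, a single account may own several master nullifier keys, while notes and nullifiers commit only to the hash of the specific key that produced them, so the hash is the natural lookup key. A toy index sketching that shape (the class and its internals are hypothetical, not the key store's actual layout):

```typescript
import { type Fr, type PublicKey } from '@aztec/circuits.js';

// Toy npkMHash -> npkM index; a real key store persists this and can also
// derive the app-siloed secret for a given (npkMHash, app) pair.
class NullifierKeyIndex {
  private readonly byNpkMHash = new Map<string, PublicKey>();

  add(npkMHash: Fr, npkM: PublicKey): void {
    this.byNpkMHash.set(npkMHash.toString(), npkM);
  }

  get(npkMHash: Fr): PublicKey {
    const npkM = this.byNpkMHash.get(npkMHash.toString());
    if (!npkM) {
      throw new Error(`Unknown master nullifier public key hash ${npkMHash}`);
    }
    return npkM;
  }
}
```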
*/ getPublicKeysHash(account: AztecAddress): Promise; + + rotateMasterNullifierKey(account: AztecAddress, secretKey: Fq): Promise; } diff --git a/yarn-project/circuit-types/src/l2_block.ts b/yarn-project/circuit-types/src/l2_block.ts index 1b80876ebb5..ae44827cfcb 100644 --- a/yarn-project/circuit-types/src/l2_block.ts +++ b/yarn-project/circuit-types/src/l2_block.ts @@ -203,6 +203,14 @@ export class L2Block { */ getStats() { const logsStats = { + noteEncryptedLogLength: this.body.txEffects.reduce( + (logCount, txEffect) => logCount + txEffect.noteEncryptedLogs.getSerializedLength(), + 0, + ), + noteEncryptedLogCount: this.body.txEffects.reduce( + (logCount, txEffect) => logCount + txEffect.noteEncryptedLogs.getTotalLogCount(), + 0, + ), encryptedLogLength: this.body.txEffects.reduce( (logCount, txEffect) => logCount + txEffect.encryptedLogs.getSerializedLength(), 0, @@ -212,11 +220,11 @@ export class L2Block { 0, ), unencryptedLogCount: this.body.txEffects.reduce( - (logCount, txEffect) => logCount + txEffect.unencryptedLogs.getSerializedLength(), + (logCount, txEffect) => logCount + txEffect.unencryptedLogs.getTotalLogCount(), 0, ), unencryptedLogSize: this.body.txEffects.reduce( - (logCount, txEffect) => logCount + txEffect.unencryptedLogs.getTotalLogCount(), + (logCount, txEffect) => logCount + txEffect.unencryptedLogs.getSerializedLength(), 0, ), }; diff --git a/yarn-project/circuit-types/src/logs/encrypted_log_outgoing_body.test.ts b/yarn-project/circuit-types/src/logs/encrypted_log_outgoing_body.test.ts new file mode 100644 index 00000000000..5f7a35079e5 --- /dev/null +++ b/yarn-project/circuit-types/src/logs/encrypted_log_outgoing_body.test.ts @@ -0,0 +1,63 @@ +import { AztecAddress, GrumpkinScalar } from '@aztec/circuits.js'; +import { Grumpkin } from '@aztec/circuits.js/barretenberg'; +import { updateInlineTestData } from '@aztec/foundation/testing'; + +import { EncryptedLogOutgoingBody } from './encrypted_log_outgoing_body.js'; + +describe('encrypt log outgoing body', () => { + let grumpkin: Grumpkin; + + beforeAll(() => { + grumpkin = new Grumpkin(); + }); + + it('encrypt and decrypt a log outgoing body', () => { + const ephSk = GrumpkinScalar.random(); + const recipientIvskApp = GrumpkinScalar.random(); + const senderOvskApp = GrumpkinScalar.random(); + + const ephPk = grumpkin.mul(Grumpkin.generator, ephSk); + const recipientIvpkApp = grumpkin.mul(Grumpkin.generator, recipientIvskApp); + + const recipientAddress = AztecAddress.random(); + + const body = new EncryptedLogOutgoingBody(ephSk, recipientAddress, recipientIvpkApp); + + const encrypted = body.computeCiphertext(senderOvskApp, ephPk); + + const recreated = EncryptedLogOutgoingBody.fromCiphertext(encrypted, senderOvskApp, ephPk); + + expect(recreated.toBuffer()).toEqual(body.toBuffer()); + }); + + it('encrypt a log outgoing body, generate input for noir test', () => { + const ephSk = new GrumpkinScalar(0x0f096b423017226a18461115fa8d34bbd0d302ee245dfaf2807e604eec4715fen); + const recipientIvskApp = new GrumpkinScalar(0x0f4d97c25d578f9348251a71ca17ae314828f8f95676ebb481df163f87fd4022n); + const senderOvskApp = new GrumpkinScalar(0x089c6887cb1446d86c64e81afc78048b74d2e28c6bc5176ac02cf7c7d36a444en); + + const ephPk = grumpkin.mul(Grumpkin.generator, ephSk); + const recipientIvpkApp = grumpkin.mul(Grumpkin.generator, recipientIvskApp); + + const recipientAddress = AztecAddress.fromBigInt(BigInt('0xdeadbeef')); + + const body = new EncryptedLogOutgoingBody(ephSk, recipientAddress, recipientIvpkApp); + + const encrypted = 
body.computeCiphertext(senderOvskApp, ephPk); + + const recreated = EncryptedLogOutgoingBody.fromCiphertext(encrypted, senderOvskApp, ephPk); + + expect(recreated.toBuffer()).toEqual(body.toBuffer()); + + const byteArrayString = `[${encrypted + .toString('hex') + .match(/.{1,2}/g)! + .map(byte => parseInt(byte, 16))}]`; + + // Run with AZTEC_GENERATE_TEST_DATA=1 to update noir test data + updateInlineTestData( + 'noir-projects/aztec-nr/aztec/src/encrypted_logs/outgoing_body.nr', + 'expected_outgoing_body_ciphertext', + byteArrayString, + ); + }); +}); diff --git a/yarn-project/circuit-types/src/logs/encrypted_log_outgoing_body.ts b/yarn-project/circuit-types/src/logs/encrypted_log_outgoing_body.ts new file mode 100644 index 00000000000..e3fb98a7404 --- /dev/null +++ b/yarn-project/circuit-types/src/logs/encrypted_log_outgoing_body.ts @@ -0,0 +1,99 @@ +import { AztecAddress, Fr, GeneratorIndex, GrumpkinPrivateKey, Point, type PublicKey } from '@aztec/circuits.js'; +import { Aes128 } from '@aztec/circuits.js/barretenberg'; +import { poseidon2Hash } from '@aztec/foundation/crypto'; +import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; + +export class EncryptedLogOutgoingBody { + constructor(public ephSk: GrumpkinPrivateKey, public recipient: AztecAddress, public recipientIvpkApp: PublicKey) {} + + /** + * Serializes the log body + * + * @returns The serialized log body + */ + public toBuffer(): Buffer { + // The serialization of Fq is [high, low] check `grumpkin_private_key.nr` + const ephSkBytes = serializeToBuffer([this.ephSk.high, this.ephSk.low]); + return serializeToBuffer(ephSkBytes, this.recipient, this.recipientIvpkApp); + } + + /** + * Deserialized the log body from a buffer + * + * @param buf - The buffer to deserialize + * @returns The deserialized log body + */ + public static fromBuffer(buf: Buffer): EncryptedLogOutgoingBody { + const reader = BufferReader.asReader(buf); + const high = reader.readObject(Fr); + const low = reader.readObject(Fr); + const ephSk = GrumpkinPrivateKey.fromHighLow(high, low); + const recipient = reader.readObject(AztecAddress); + const recipientIvpkApp = reader.readObject(Point); // PublicKey = Point + + return new EncryptedLogOutgoingBody(ephSk, recipient, recipientIvpkApp); + } + + /** + * Encrypts a log body + * + * @param ovskApp - The app siloed outgoing viewing secret key + * @param ephPk - The ephemeral public key + * + * @returns The ciphertext of the encrypted log body + */ + public computeCiphertext(ovskApp: GrumpkinPrivateKey, ephPk: PublicKey) { + // We could use `ephSk` and compute `ephPk` from it. + // We mainly provide it to keep the same api and potentially slight optimization as we can reuse it. + + const aesSecret = EncryptedLogOutgoingBody.derivePoseidonAESSecret(ovskApp, ephPk); + + const key = aesSecret.subarray(0, 16); + const iv = aesSecret.subarray(16, 32); + + const aes128 = new Aes128(); + const buffer = this.toBuffer(); + + return aes128.encryptBufferCBC(buffer, iv, key); + } + + /** + * Decrypts a log body + * + * @param ciphertext - The ciphertext buffer + * @param ovskApp - The app siloed outgoing viewing secret key + * @param ephPk - The ephemeral public key + * + * @returns The decrypted log body + */ + public static fromCiphertext( + ciphertext: Buffer | bigint[], + ovskApp: GrumpkinPrivateKey, + ephPk: PublicKey, + ): EncryptedLogOutgoingBody { + const input = Buffer.isBuffer(ciphertext) ? 
ciphertext : Buffer.from(ciphertext.map((x: bigint) => Number(x))); + + const aesSecret = EncryptedLogOutgoingBody.derivePoseidonAESSecret(ovskApp, ephPk); + const key = aesSecret.subarray(0, 16); + const iv = aesSecret.subarray(16, 32); + + const aes128 = new Aes128(); + const buffer = aes128.decryptBufferCBC(input, iv, key); + + return EncryptedLogOutgoingBody.fromBuffer(buffer); + } + + /** + * Derives an AES symmetric key from the app siloed outgoing viewing secret key + * and the ephemeral public key using poseidon. + * + * @param ovskApp - The app siloed outgoing viewing secret key + * @param ephPk - The ephemeral public key + * @returns The derived AES secret + */ + static derivePoseidonAESSecret(ovskApp: GrumpkinPrivateKey, ephPk: PublicKey) { + // For performance reasons, we do NOT use the usual `deriveAESSecret` function here + // Instead we compute the secret using poseidon + return poseidon2Hash([ovskApp.high, ovskApp.low, ephPk.x, ephPk.y, GeneratorIndex.SYMMETRIC_KEY]).toBuffer(); + } +} diff --git a/yarn-project/circuit-types/src/logs/encrypted_log_payload.test.ts b/yarn-project/circuit-types/src/logs/encrypted_log_payload.test.ts new file mode 100644 index 00000000000..bb62424faf0 --- /dev/null +++ b/yarn-project/circuit-types/src/logs/encrypted_log_payload.test.ts @@ -0,0 +1,42 @@ +import { AztecAddress, GrumpkinScalar } from '@aztec/circuits.js'; +import { Grumpkin } from '@aztec/circuits.js/barretenberg'; + +import { EncryptedLogPayload } from './encrypted_log_payload.js'; +import { L1NotePayload } from './l1_note_payload/l1_note_payload.js'; + +describe('encrypt and decrypt a full log', () => { + let grumpkin: Grumpkin; + + let ovsk: GrumpkinScalar; + let ivsk: GrumpkinScalar; + + let payload: EncryptedLogPayload; + let encrypted: Buffer; + + beforeAll(() => { + grumpkin = new Grumpkin(); + + ovsk = GrumpkinScalar.random(); + ivsk = GrumpkinScalar.random(); + + const ephSk = GrumpkinScalar.random(); + + const recipientAddress = AztecAddress.random(); + const ivpk = grumpkin.mul(Grumpkin.generator, ivsk); + + payload = EncryptedLogPayload.fromL1NotePayload(L1NotePayload.random()); + encrypted = payload.encrypt(ephSk, recipientAddress, ivpk, ovsk); + }); + + it('decrypt a log as incoming', () => { + const recreated = EncryptedLogPayload.decryptAsIncoming(encrypted, ivsk); + + expect(recreated.toBuffer()).toEqual(payload.toBuffer()); + }); + + it('decrypt a log as outgoing', () => { + const recreated = EncryptedLogPayload.decryptAsOutgoing(encrypted, ovsk); + + expect(recreated.toBuffer()).toEqual(payload.toBuffer()); + }); +}); diff --git a/yarn-project/circuit-types/src/logs/encrypted_log_payload.ts b/yarn-project/circuit-types/src/logs/encrypted_log_payload.ts new file mode 100644 index 00000000000..6ef1cc82add --- /dev/null +++ b/yarn-project/circuit-types/src/logs/encrypted_log_payload.ts @@ -0,0 +1,209 @@ +import { + AztecAddress, + Fr, + type GrumpkinPrivateKey, + Point, + type PublicKey, + computeIvpkApp, + computeIvskApp, + computeOvskApp, + derivePublicKeyFromSecretKey, +} from '@aztec/circuits.js'; +import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; + +import { EncryptedLogHeader } from './encrypted_log_header.js'; +import { EncryptedLogIncomingBody } from './encrypted_log_incoming_body.js'; +import { EncryptedLogOutgoingBody } from './encrypted_log_outgoing_body.js'; +import { type L1NotePayload } from './l1_note_payload/l1_note_payload.js'; +import { Note } from './l1_note_payload/note.js'; + +// A placeholder tag until we have a proper tag system in place. +const PLACEHOLDER_TAG = new Fr(33); + +// Both the incoming and the outgoing header are 48 bytes. +// 32 bytes for the address, and 16 bytes padding to follow PKCS#7 +const HEADER_SIZE = 48; + +// The outgoing body has a constant size of 176 bytes. +// 160 bytes for the secret key, address, and public key, and 16 bytes padding to follow PKCS#7 +const OUTGOING_BODY_SIZE = 176; + +export class EncryptedLogPayload { + constructor( + /** + * A note as emitted from Noir contract. Can be used along with private key to compute nullifier. + */ + public note: Note, + /** + * Address of the contract this tx is interacting with. + */ + public contractAddress: AztecAddress, + /** + * Storage slot of the underlying note. + */ + public storageSlot: Fr, + /** + * Type identifier for the underlying note, required to determine how to compute its hash and nullifier. + */ + public noteTypeId: Fr, + ) {} + + toBuffer() { + return serializeToBuffer([this.note, this.contractAddress, this.storageSlot, this.noteTypeId]); + } + + static fromBuffer(buffer: Buffer | BufferReader): EncryptedLogPayload { + const reader = BufferReader.asReader(buffer); + return new EncryptedLogPayload( + reader.readObject(Note), + reader.readObject(AztecAddress), + Fr.fromBuffer(reader), + Fr.fromBuffer(reader), + ); + } + + static fromL1NotePayload(l1NotePayload: L1NotePayload) { + return new EncryptedLogPayload( + l1NotePayload.note, + l1NotePayload.contractAddress, + l1NotePayload.storageSlot, + l1NotePayload.noteTypeId, + ); + } + + /** + * Encrypts a note payload for a given recipient and sender. + * Creates an incoming log for the recipient using the recipient's ivsk, and + * an outgoing log for the sender using the sender's ovsk. + * + * @param ephSk - An ephemeral secret key used for the encryption + * @param recipient - The recipient address, retrievable by the sender for their logs + * @param ivpk - The incoming viewing public key of the recipient + * @param ovsk - The outgoing viewing secret key of the sender + * @returns A buffer containing the encrypted log payload + */ + public encrypt(ephSk: GrumpkinPrivateKey, recipient: AztecAddress, ivpk: PublicKey, ovsk: GrumpkinPrivateKey) { + const ephPk = derivePublicKeyFromSecretKey(ephSk); + const ovpk = derivePublicKeyFromSecretKey(ovsk); + + const header = new EncryptedLogHeader(this.contractAddress); + + const incomingHeaderCiphertext = header.computeCiphertext(ephSk, ivpk); + const outgoingHeaderCiphertext = header.computeCiphertext(ephSk, ovpk); + + const ivpkApp = computeIvpkApp(ivpk, this.contractAddress); + + const incomingBodyCiphertext = new EncryptedLogIncomingBody( + this.storageSlot, + this.noteTypeId, + this.note, + ).computeCiphertext(ephSk, ivpkApp); + + const ovskApp = computeOvskApp(ovsk, this.contractAddress); + + const outgoingBodyCiphertext = new EncryptedLogOutgoingBody(ephSk, recipient, ivpkApp).computeCiphertext( + ovskApp, + ephPk, + ); + + return Buffer.concat([ + PLACEHOLDER_TAG.toBuffer(), + PLACEHOLDER_TAG.toBuffer(), + ephPk.toBuffer(), + incomingHeaderCiphertext, + outgoingHeaderCiphertext, + outgoingBodyCiphertext, + incomingBodyCiphertext, + ]); + } + + /** + * Decrypts a ciphertext as an incoming log. + * + * This is executable by the recipient of the note, and uses the ivsk to decrypt the payload. + * The outgoing parts of the log are ignored entirely. + * + * Produces the same output as `decryptAsOutgoing`. 
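A usage sketch of that symmetry, mirroring the test file above (the `declare`d inputs stand in for a payload produced by `encrypt` and the two parties' viewing secrets):

```typescript
import { GrumpkinScalar } from '@aztec/circuits.js';

import { EncryptedLogPayload } from './encrypted_log_payload.js';

// Hypothetical inputs: a ciphertext from payload.encrypt(ephSk, recipient, ivpk, ovsk),
// the recipient's incoming viewing secret and the sender's outgoing viewing secret.
declare const ciphertext: Buffer;
declare const ivsk: GrumpkinScalar;
declare const ovsk: GrumpkinScalar;

const seenByRecipient = EncryptedLogPayload.decryptAsIncoming(ciphertext, ivsk);
const seenBySender = EncryptedLogPayload.decryptAsOutgoing(ciphertext, ovsk);
// Both recover the same note, contract address, storage slot and note type id.
```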
+ * + * @param ciphertext - The ciphertext for the log + * @param ivsk - The incoming viewing secret key, used to decrypt the logs + * @returns The decrypted log payload + */ + public static decryptAsIncoming(ciphertext: Buffer | bigint[], ivsk: GrumpkinPrivateKey) { + const input = Buffer.isBuffer(ciphertext) ? ciphertext : Buffer.from(ciphertext.map((x: bigint) => Number(x))); + const reader = BufferReader.asReader(input); + + // We don't use the tags as part of the decryption here, we just gotta read to skip them. + reader.readObject(Fr); // incoming tag + reader.readObject(Fr); // outgoing tag + + const ephPk = reader.readObject(Point); + + const incomingHeader = EncryptedLogHeader.fromCiphertext(reader.readBytes(HEADER_SIZE), ivsk, ephPk); + + // Skipping the outgoing header and body + reader.readBytes(HEADER_SIZE); + reader.readBytes(OUTGOING_BODY_SIZE); + + // The incoming can be of variable size, so we read until the end + const incomingBodySlice = reader.readToEnd(); + + const ivskApp = computeIvskApp(ivsk, incomingHeader.address); + const incomingBody = EncryptedLogIncomingBody.fromCiphertext(incomingBodySlice, ivskApp, ephPk); + + return new EncryptedLogPayload( + incomingBody.note, + incomingHeader.address, + incomingBody.storageSlot, + incomingBody.noteTypeId, + ); + } + + /** + * Decrypts a ciphertext as an outgoing log. + * + * This is executable by the sender of the note, and uses the ovsk to decrypt the payload. + * The outgoing parts are decrypted to retrieve information that allows the sender to + * decrypt the incoming log, and learn about the note contents. + * + * Produces the same output as `decryptAsIncoming`. + * + * @param ciphertext - The ciphertext for the log + * @param ovsk - The outgoing viewing secret key, used to decrypt the logs + * @returns The decrypted log payload + */ + public static decryptAsOutgoing(ciphertext: Buffer | bigint[], ovsk: GrumpkinPrivateKey) { + const input = Buffer.isBuffer(ciphertext) ? ciphertext : Buffer.from(ciphertext.map((x: bigint) => Number(x))); + const reader = BufferReader.asReader(input); + + // We don't use the tags as part of the decryption here, we just gotta read to skip them. 
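For orientation while reading the parsing code that follows, the fixed-size prefix implied by the constants above works out as sketched below, assuming 32-byte `Fr` values and a 64-byte point encoding:

```typescript
// Byte layout of an encrypted log payload as produced by encrypt():
//     0 ..  32  incoming tag (placeholder Fr)
//    32 ..  64  outgoing tag (placeholder Fr)
//    64 .. 128  ephPk (Grumpkin point)
//   128 .. 176  incoming header ciphertext (HEADER_SIZE = 48)
//   176 .. 224  outgoing header ciphertext (HEADER_SIZE = 48)
//   224 .. 400  outgoing body ciphertext (OUTGOING_BODY_SIZE = 176)
//   400 .. end  incoming body ciphertext (variable length)
const INCOMING_BODY_OFFSET = 32 + 32 + 64 + 48 + 48 + 176; // 400
```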
+ reader.readObject(Fr); // incoming tag + reader.readObject(Fr); // outgoing tag + + const ephPk = reader.readObject(Point); + + // Skip the incoming header + reader.readBytes(HEADER_SIZE); + + const outgoingHeader = EncryptedLogHeader.fromCiphertext(reader.readBytes(HEADER_SIZE), ovsk, ephPk); + + const ovskApp = computeOvskApp(ovsk, outgoingHeader.address); + const outgoingBody = EncryptedLogOutgoingBody.fromCiphertext(reader.readBytes(OUTGOING_BODY_SIZE), ovskApp, ephPk); + + // The incoming can be of variable size, so we read until the end + const incomingBodySlice = reader.readToEnd(); + + const incomingBody = EncryptedLogIncomingBody.fromCiphertext( + incomingBodySlice, + outgoingBody.ephSk, + outgoingBody.recipientIvpkApp, + ); + + return new EncryptedLogPayload( + incomingBody.note, + outgoingHeader.address, + incomingBody.storageSlot, + incomingBody.noteTypeId, + ); + } +} diff --git a/yarn-project/circuit-types/src/logs/index.ts b/yarn-project/circuit-types/src/logs/index.ts index 2b91857d095..0e4b8200391 100644 --- a/yarn-project/circuit-types/src/logs/index.ts +++ b/yarn-project/circuit-types/src/logs/index.ts @@ -12,3 +12,4 @@ export * from './unencrypted_l2_log.js'; export * from './extended_unencrypted_l2_log.js'; export * from './encrypted_log_header.js'; export * from './encrypted_log_incoming_body.js'; +export * from './encrypted_log_outgoing_body.js'; diff --git a/yarn-project/circuit-types/src/logs/log_type.ts b/yarn-project/circuit-types/src/logs/log_type.ts index 1f9c247cc4a..c5a4e666496 100644 --- a/yarn-project/circuit-types/src/logs/log_type.ts +++ b/yarn-project/circuit-types/src/logs/log_type.ts @@ -5,10 +5,11 @@ import { type UnencryptedL2Log } from './unencrypted_l2_log.js'; * Defines possible log types. */ export enum LogType { + NOTEENCRYPTED, ENCRYPTED, UNENCRYPTED, } -export type FromLogType = TLogType extends LogType.ENCRYPTED - ? EncryptedL2Log - : UnencryptedL2Log; +export type FromLogType = TLogType extends LogType.UNENCRYPTED + ? UnencryptedL2Log + : EncryptedL2Log; diff --git a/yarn-project/circuit-types/src/logs/tx_l2_logs.ts b/yarn-project/circuit-types/src/logs/tx_l2_logs.ts index c18bec7c3c5..86ad70a046b 100644 --- a/yarn-project/circuit-types/src/logs/tx_l2_logs.ts +++ b/yarn-project/circuit-types/src/logs/tx_l2_logs.ts @@ -1,4 +1,8 @@ -import { MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX } from '@aztec/circuits.js'; +import { + MAX_ENCRYPTED_LOGS_PER_TX, + MAX_NOTE_ENCRYPTED_LOGS_PER_TX, + MAX_UNENCRYPTED_LOGS_PER_TX, +} from '@aztec/circuits.js'; import { sha256Trunc } from '@aztec/foundation/crypto'; import { BufferReader, prefixBufferWithLength } from '@aztec/foundation/serialize'; @@ -6,6 +10,7 @@ import isEqual from 'lodash.isequal'; import { type EncryptedL2Log } from './encrypted_l2_log.js'; import { EncryptedFunctionL2Logs, type FunctionL2Logs, UnencryptedFunctionL2Logs } from './function_l2_logs.js'; +import { LogType } from './log_type.js'; import { type UnencryptedL2Log } from './unencrypted_l2_log.js'; /** @@ -83,7 +88,7 @@ export abstract class TxL2Logs { * Note: This is a TS implementation of `computeKernelLogsHash` function in Decoder.sol. See that function documentation * for more details. 
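The padding length now depends on the log type: note-encrypted logs pad to MAX_NOTE_ENCRYPTED_LOGS_PER_TX, the others to MAX_ENCRYPTED_LOGS_PER_TX. A minimal model of the computation performed below, assuming each unrolled log contributes one 32-byte hash (the per-function flattening that precedes the padding is not shown in this hunk):

```typescript
import { sha256Trunc } from '@aztec/foundation/crypto';

// Model: hash the per-log 32-byte hashes, zero-padded to `max` entries.
function kernelLogsHashModel(logHashes: Buffer[], max: number): Buffer {
  if (logHashes.length === 0) {
    return Buffer.alloc(32);
  }
  const padding = Array.from({ length: max - logHashes.length }, () => Buffer.alloc(32));
  return sha256Trunc(Buffer.concat([...logHashes, ...padding]));
}
```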
*/ - public hash(): Buffer { + public hash(logType: LogType = LogType.ENCRYPTED): Buffer { if (this.unrollLogs().length == 0) { return Buffer.alloc(32); } @@ -94,7 +99,8 @@ export abstract class TxL2Logs { } // pad the end of logs with 0s // NB - This assumes MAX_ENCRYPTED_LOGS_PER_TX == MAX_UNENCRYPTED_LOGS_PER_TX - for (let i = 0; i < MAX_ENCRYPTED_LOGS_PER_TX - this.unrollLogs().length; i++) { + const pad = logType == LogType.NOTEENCRYPTED ? MAX_NOTE_ENCRYPTED_LOGS_PER_TX : MAX_ENCRYPTED_LOGS_PER_TX; + for (let i = 0; i < pad - this.unrollLogs().length; i++) { flattenedLogs = Buffer.concat([flattenedLogs, Buffer.alloc(32)]); } diff --git a/yarn-project/circuit-types/src/mocks.ts b/yarn-project/circuit-types/src/mocks.ts index dba3bca5427..2e21eac53b9 100644 --- a/yarn-project/circuit-types/src/mocks.ts +++ b/yarn-project/circuit-types/src/mocks.ts @@ -2,13 +2,13 @@ import { AztecAddress, CallRequest, GasSettings, + LogHash, MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, Nullifier, PartialPrivateTailPublicInputsForPublic, PrivateKernelTailCircuitPublicInputs, Proof, PublicCallRequest, - SideEffect, computeContractClassId, getContractClassFromArtifact, } from '@aztec/circuits.js'; @@ -68,6 +68,7 @@ export const mockTx = ( const isForPublic = totalPublicCallRequests > 0; const data = PrivateKernelTailCircuitPublicInputs.empty(); const firstNullifier = new Nullifier(new Fr(seed + 1), 0, Fr.ZERO); + const noteEncryptedLogs = EncryptedTxL2Logs.empty(); // Mock seems to have no new notes => no note logs const encryptedLogs = hasLogs ? EncryptedTxL2Logs.random(2, 3) : EncryptedTxL2Logs.empty(); // 2 priv function invocations creating 3 encrypted logs each const unencryptedLogs = hasLogs ? UnencryptedTxL2Logs.random(2, 1) : UnencryptedTxL2Logs.empty(); // 2 priv function invocations creating 1 unencrypted log each data.constants.txContext.gasSettings = GasSettings.default(); @@ -102,17 +103,26 @@ export const mockTx = ( encryptedLogs.functionLogs.forEach((log, j) => { // ts complains if we dont check .forPublic here, even though it is defined ^ if (data.forPublic) { - data.forPublic.end.encryptedLogsHashes[j] = new SideEffect(Fr.fromBuffer(log.hash()), new Fr(i++)); + data.forPublic.end.encryptedLogsHashes[j] = new LogHash( + Fr.fromBuffer(log.hash()), + i++, + new Fr(log.toBuffer().length), + ); } }); unencryptedLogs.functionLogs.forEach((log, j) => { if (data.forPublic) { - data.forPublic.end.unencryptedLogsHashes[j] = new SideEffect(Fr.fromBuffer(log.hash()), new Fr(i++)); + data.forPublic.end.unencryptedLogsHashes[j] = new LogHash( + Fr.fromBuffer(log.hash()), + i++, + new Fr(log.toBuffer().length), + ); } }); } } else { data.forRollup!.end.newNullifiers[0] = firstNullifier.value; + data.forRollup!.end.noteEncryptedLogsHash = Fr.fromBuffer(noteEncryptedLogs.hash(0)); data.forRollup!.end.encryptedLogsHash = Fr.fromBuffer(encryptedLogs.hash()); data.forRollup!.end.unencryptedLogsHash = Fr.fromBuffer(unencryptedLogs.hash()); } @@ -120,6 +130,7 @@ export const mockTx = ( const tx = new Tx( data, new Proof(Buffer.alloc(0)), + noteEncryptedLogs, encryptedLogs, unencryptedLogs, publicCallRequests, diff --git a/yarn-project/circuit-types/src/stats/metrics.ts b/yarn-project/circuit-types/src/stats/metrics.ts index 5b7217e42f5..41fbc543064 100644 --- a/yarn-project/circuit-types/src/stats/metrics.ts +++ b/yarn-project/circuit-types/src/stats/metrics.ts @@ -4,7 +4,8 @@ import { type StatsEventName } from './stats.js'; export type MetricGroupBy = | 'block-size' | 'chain-length' - | 'circuit-name' + | 
'protocol-circuit-name' + | 'app-circuit-name' | 'classes-registered' | 'leaf-count' | 'data-writes' @@ -111,41 +112,101 @@ export const Metrics = [ events: ['note-processor-caught-up'], }, { - name: 'circuit_simulation_time_in_ms', - groupBy: 'circuit-name', + name: 'protocol_circuit_simulation_time_in_ms', + groupBy: 'protocol-circuit-name', description: 'Time to run a circuit simulation.', events: ['circuit-simulation'], }, { - name: 'circuit_witness_generation_time_in_ms', - groupBy: 'circuit-name', + name: 'protocol_circuit_witness_generation_time_in_ms', + groupBy: 'protocol-circuit-name', description: 'Time to generate the partial witness for a circuit', events: ['circuit-simulation'], }, { - name: 'circuit_proving_time_in_ms', - groupBy: 'circuit-name', + name: 'protocol_circuit_proving_time_in_ms', + groupBy: 'protocol-circuit-name', description: 'Time to prove circuit execution.', events: ['circuit-proving'], }, { - name: 'circuit_input_size_in_bytes', - groupBy: 'circuit-name', + name: 'protocol_circuit_input_size_in_bytes', + groupBy: 'protocol-circuit-name', description: 'Size of the inputs to a circuit simulation.', events: ['circuit-simulation'], }, { - name: 'circuit_output_size_in_bytes', - groupBy: 'circuit-name', + name: 'protocol_circuit_output_size_in_bytes', + groupBy: 'protocol-circuit-name', description: 'Size of the outputs (ie public inputs) from a circuit simulation.', events: ['circuit-simulation'], }, { - name: 'circuit_proof_size_in_bytes', - groupBy: 'circuit-name', + name: 'protocol_circuit_proof_size_in_bytes', + groupBy: 'protocol-circuit-name', description: 'Size of the proof produced by a circuit.', events: ['circuit-proving'], }, + { + name: 'protocol_circuit_num_public_inputs', + groupBy: 'protocol-circuit-name', + description: 'Number of public inputs.', + events: ['circuit-proving'], + }, + { + name: 'protocol_circuit_size_in_gates', + groupBy: 'protocol-circuit-name', + description: 'Size of the proof produced by a circuit.', + events: ['circuit-proving'], + }, + { + name: 'app_circuit_simulation_time_in_ms', + groupBy: 'app-circuit-name', + description: 'Time to run a circuit simulation.', + events: ['circuit-simulation'], + }, + { + name: 'app_circuit_input_size_in_bytes', + groupBy: 'app-circuit-name', + description: 'Size of the inputs to a circuit simulation.', + events: ['circuit-simulation'], + }, + { + name: 'app_circuit_output_size_in_bytes', + groupBy: 'app-circuit-name', + description: 'Size of the outputs (ie public inputs) from a circuit simulation.', + events: ['circuit-simulation'], + }, + { + name: 'app_circuit_proof_size_in_bytes', + groupBy: 'app-circuit-name', + description: 'Size of the proof produced by a circuit.', + events: ['circuit-proving'], + }, + { + name: 'app_circuit_witness_generation_time_in_ms', + groupBy: 'app-circuit-name', + description: 'Time to generate the partial witness for a circuit', + events: ['circuit-simulation'], + }, + { + name: 'app_circuit_proving_time_in_ms', + groupBy: 'app-circuit-name', + description: 'Duration of proving an app circuit.', + events: ['circuit-proving'], + }, + { + name: 'app_circuit_size_in_gates', + groupBy: 'app-circuit-name', + description: 'Size of an app circuit.', + events: ['circuit-proving'], + }, + { + name: 'app_circuit_num_public_inputs', + groupBy: 'app-circuit-name', + description: 'Number of public inputs.', + events: ['circuit-proving'], + }, { name: 'tx_size_in_bytes', groupBy: 'classes-registered', diff --git a/yarn-project/circuit-types/src/stats/stats.ts 
b/yarn-project/circuit-types/src/stats/stats.ts index ca21b3b6704..750735d8cef 100644 --- a/yarn-project/circuit-types/src/stats/stats.ts +++ b/yarn-project/circuit-types/src/stats/stats.ts @@ -52,12 +52,14 @@ export type CircuitName = | 'base-parity' | 'root-parity' | 'base-rollup' - | 'private-kernel-init' - | 'private-kernel-ordering' - | 'root-rollup' | 'merge-rollup' + | 'root-rollup' + | 'private-kernel-init' | 'private-kernel-inner' | 'private-kernel-reset' + | 'private-kernel-tail' + | 'private-kernel-tail-to-public' + | 'app-circuit' | 'public-kernel-setup' | 'public-kernel-app-logic' | 'public-kernel-teardown' @@ -69,6 +71,8 @@ export type CircuitSimulationStats = { eventName: 'circuit-simulation'; /** Name of the circuit. */ circuitName: CircuitName; + /** Optional. The function name that's being simulated */ + appCircuitName?: string; /** Duration in ms. */ duration: number; /** Size in bytes of circuit inputs. */ @@ -83,6 +87,8 @@ export type CircuitWitnessGenerationStats = { eventName: 'circuit-witness-generation'; /** Name of the circuit. */ circuitName: CircuitName; + /** Optional. The function name that's being proven */ + appCircuitName?: string; /** Duration in ms. */ duration: number; /** Size in bytes of circuit inputs. */ @@ -97,14 +103,20 @@ export type CircuitProvingStats = { eventName: 'circuit-proving'; /** Name of the circuit. */ circuitName: CircuitName; + /** Optional. The function name that was proven */ + appCircuitName?: string; /** Duration in ms. */ duration: number; + /** The size of the circuit (in gates) */ + circuitSize: number; /** Size in bytes of circuit inputs. */ inputSize: number; - /** Size in bytes of circuit outputs (aka public inputs). */ + /** Size in bytes of circuit output. */ outputSize: number; /** Size in bytes of the proof. */ proofSize: number; + /** The number of public inputs */ + numPublicInputs: number; }; /** Stats for an L2 block built by a sequencer. */ @@ -165,10 +177,14 @@ export type TxStats = { size: number; /** Size of the proof. */ proofSize: number; + /** Number of note encrypted logs. */ + noteEncryptedLogCount: number; /** Number of encrypted logs. */ encryptedLogCount: number; /** Number of unencrypted logs. */ unencryptedLogCount: number; + /** Serialized size of note encrypted logs. */ + noteEncryptedLogSize: number; /** Serialized size of encrypted logs. */ encryptedLogSize: number; /** Serialized size of unencrypted logs. */ diff --git a/yarn-project/circuit-types/src/tx/processed_tx.ts b/yarn-project/circuit-types/src/tx/processed_tx.ts index 24a5265b305..4fad2f35652 100644 --- a/yarn-project/circuit-types/src/tx/processed_tx.ts +++ b/yarn-project/circuit-types/src/tx/processed_tx.ts @@ -46,7 +46,7 @@ export type PublicKernelRequest = PublicKernelTailRequest | PublicKernelNonTailR * Represents a tx that has been processed by the sequencer public processor, * so its kernel circuit public inputs are filled in. */ -export type ProcessedTx = Pick & { +export type ProcessedTx = Pick & { /** * Output of the private tail or public tail kernel circuit for this tx. */ @@ -134,6 +134,8 @@ export function makeProcessedTx( hash: tx.getTxHash(), data: kernelOutput, proof, + // TODO(4712): deal with non-revertible logs here + noteEncryptedLogs: revertReason ? EncryptedTxL2Logs.empty() : tx.noteEncryptedLogs, encryptedLogs: revertReason ? EncryptedTxL2Logs.empty() : tx.encryptedLogs, unencryptedLogs: revertReason ? 
UnencryptedTxL2Logs.empty() : tx.unencryptedLogs, isEmpty: false, @@ -157,6 +159,7 @@ export function makeEmptyProcessedTx(header: Header, chainId: Fr, version: Fr): const hash = new TxHash(Fr.ZERO.toBuffer()); return { hash, + noteEncryptedLogs: EncryptedTxL2Logs.empty(), encryptedLogs: EncryptedTxL2Logs.empty(), unencryptedLogs: UnencryptedTxL2Logs.empty(), data: emptyKernelOutput, @@ -178,6 +181,7 @@ export function toTxEffect(tx: ProcessedTx): TxEffect { tx.data.end.publicDataUpdateRequests .map(t => new PublicDataWrite(t.leafSlot, t.newValue)) .filter(h => !h.isEmpty()), + tx.noteEncryptedLogs || EncryptedTxL2Logs.empty(), tx.encryptedLogs || EncryptedTxL2Logs.empty(), tx.unencryptedLogs || UnencryptedTxL2Logs.empty(), ); diff --git a/yarn-project/circuit-types/src/tx/tx.ts b/yarn-project/circuit-types/src/tx/tx.ts index a07496a238c..44853dcd91e 100644 --- a/yarn-project/circuit-types/src/tx/tx.ts +++ b/yarn-project/circuit-types/src/tx/tx.ts @@ -25,6 +25,10 @@ export class Tx { * Proof from the private kernel circuit. */ public readonly proof: Proof, + /** + * Encrypted note logs generated by the tx. + */ + public readonly noteEncryptedLogs: EncryptedTxL2Logs, /** * Encrypted logs generated by the tx. */ @@ -67,6 +71,7 @@ export class Tx { reader.readObject(PrivateKernelTailCircuitPublicInputs), reader.readObject(Proof), reader.readObject(EncryptedTxL2Logs), + reader.readObject(EncryptedTxL2Logs), reader.readObject(UnencryptedTxL2Logs), reader.readArray(reader.readNumber(), PublicCallRequest), reader.readObject(PublicCallRequest), @@ -81,6 +86,7 @@ export class Tx { return serializeToBuffer([ this.data, this.proof, + this.noteEncryptedLogs, this.encryptedLogs, this.unencryptedLogs, this.enqueuedPublicFunctionCalls.length, @@ -96,6 +102,7 @@ export class Tx { public toJSON() { return { data: this.data.toBuffer().toString('hex'), + noteEncryptedLogs: this.noteEncryptedLogs.toBuffer().toString('hex'), encryptedLogs: this.encryptedLogs.toBuffer().toString('hex'), unencryptedLogs: this.unencryptedLogs.toBuffer().toString('hex'), proof: this.proof.toBuffer().toString('hex'), @@ -120,6 +127,7 @@ export class Tx { */ public static fromJSON(obj: any) { const publicInputs = PrivateKernelTailCircuitPublicInputs.fromBuffer(Buffer.from(obj.data, 'hex')); + const noteEncryptedLogs = EncryptedTxL2Logs.fromBuffer(Buffer.from(obj.noteEncryptedLogs, 'hex')); const encryptedLogs = EncryptedTxL2Logs.fromBuffer(Buffer.from(obj.encryptedLogs, 'hex')); const unencryptedLogs = UnencryptedTxL2Logs.fromBuffer(Buffer.from(obj.unencryptedLogs, 'hex')); const proof = Buffer.from(obj.proof, 'hex'); @@ -130,6 +138,7 @@ export class Tx { return new Tx( publicInputs, Proof.fromBuffer(proof), + noteEncryptedLogs, encryptedLogs, unencryptedLogs, enqueuedPublicFunctions, @@ -154,8 +163,10 @@ export class Tx { getStats(): TxStats { return { txHash: this.getTxHash().toString(), + noteEncryptedLogCount: this.noteEncryptedLogs.getTotalLogCount(), encryptedLogCount: this.encryptedLogs.getTotalLogCount(), unencryptedLogCount: this.unencryptedLogs.getTotalLogCount(), + noteEncryptedLogSize: this.noteEncryptedLogs.getSerializedLength(), encryptedLogSize: this.encryptedLogs.getSerializedLength(), unencryptedLogSize: this.unencryptedLogs.getSerializedLength(), @@ -208,6 +219,7 @@ export class Tx { static clone(tx: Tx): Tx { const publicInputs = PrivateKernelTailCircuitPublicInputs.fromBuffer(tx.data.toBuffer()); const proof = Proof.fromBuffer(tx.proof.toBuffer()); + const noteEncryptedLogs = 
EncryptedTxL2Logs.fromBuffer(Buffer.from(tx.noteEncryptedLogs.toBuffer())); const encryptedLogs = EncryptedTxL2Logs.fromBuffer(tx.encryptedLogs.toBuffer()); const unencryptedLogs = UnencryptedTxL2Logs.fromBuffer(tx.unencryptedLogs.toBuffer()); const enqueuedPublicFunctions = tx.enqueuedPublicFunctionCalls.map(x => { @@ -217,6 +229,7 @@ export class Tx { return new Tx( publicInputs, proof, + noteEncryptedLogs, encryptedLogs, unencryptedLogs, enqueuedPublicFunctions, diff --git a/yarn-project/circuit-types/src/tx_effect.test.ts b/yarn-project/circuit-types/src/tx_effect.test.ts index 0858dc1837f..82876e870aa 100644 --- a/yarn-project/circuit-types/src/tx_effect.test.ts +++ b/yarn-project/circuit-types/src/tx_effect.test.ts @@ -10,6 +10,6 @@ describe('TxEffect', () => { it('hash of empty tx effect matches snapshot', () => { const txEffectHash = TxEffect.empty().hash().toString('hex'); // If you change this you have to change the hardcoded value in TxsDecoder.sol! - expect(txEffectHash).toMatchInlineSnapshot(`"00822c2cdfbc7a6e5f4dd355251f4dfc9af1b1a64152464b9b83c5007eeed0f3"`); + expect(txEffectHash).toMatchInlineSnapshot(`"00543e0a6642ffeb8039296861765a53407bba62bd1c97ca43374de950bbe0a7"`); }); }); diff --git a/yarn-project/circuit-types/src/tx_effect.ts b/yarn-project/circuit-types/src/tx_effect.ts index 4caaec8c57f..422727c5062 100644 --- a/yarn-project/circuit-types/src/tx_effect.ts +++ b/yarn-project/circuit-types/src/tx_effect.ts @@ -43,6 +43,7 @@ export class TxEffect { /** * The logs of the txEffect */ + public noteEncryptedLogs: EncryptedTxL2Logs, public encryptedLogs: EncryptedTxL2Logs, public unencryptedLogs: UnencryptedTxL2Logs, ) { @@ -95,6 +96,7 @@ export class TxEffect { serializeArrayOfBufferableToVector(this.nullifiers, 1), serializeArrayOfBufferableToVector(this.l2ToL1Msgs, 1), serializeArrayOfBufferableToVector(this.publicDataWrites, 1), + this.noteEncryptedLogs, this.encryptedLogs, this.unencryptedLogs, ]); @@ -116,6 +118,7 @@ export class TxEffect { reader.readVectorUint8Prefix(Fr), reader.readVectorUint8Prefix(PublicDataWrite), reader.readObject(EncryptedTxL2Logs), + reader.readObject(EncryptedTxL2Logs), reader.readObject(UnencryptedTxL2Logs), ); } @@ -145,6 +148,7 @@ export class TxEffect { PublicDataWrite.SIZE_IN_BYTES * MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, ); + const noteEncryptedLogsHashKernel0 = this.noteEncryptedLogs.hash(0); const encryptedLogsHashKernel0 = this.encryptedLogs.hash(); const unencryptedLogsHashKernel0 = this.unencryptedLogs.hash(); @@ -155,6 +159,7 @@ export class TxEffect { nullifiersBuffer, l2ToL1MsgsBuffer, publicDataWritesBuffer, + noteEncryptedLogsHashKernel0, encryptedLogsHashKernel0, unencryptedLogsHashKernel0, ]); @@ -176,12 +181,23 @@ export class TxEffect { makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_TX, Fr.random), makeTuple(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, PublicDataWrite.random), EncryptedTxL2Logs.random(numPrivateCallsPerTx, numEncryptedLogsPerCall), + EncryptedTxL2Logs.random(numPrivateCallsPerTx, numEncryptedLogsPerCall), UnencryptedTxL2Logs.random(numPublicCallsPerTx, numUnencryptedLogsPerCall), ); } static empty(): TxEffect { - return new TxEffect(RevertCode.OK, Fr.ZERO, [], [], [], [], EncryptedTxL2Logs.empty(), UnencryptedTxL2Logs.empty()); + return new TxEffect( + RevertCode.OK, + Fr.ZERO, + [], + [], + [], + [], + EncryptedTxL2Logs.empty(), + EncryptedTxL2Logs.empty(), + UnencryptedTxL2Logs.empty(), + ); } isEmpty(): boolean { @@ -205,6 +221,7 @@ export class TxEffect { nullifiers: [${this.nullifiers.map(h => 
h.toString()).join(', ')}], l2ToL1Msgs: [${this.l2ToL1Msgs.map(h => h.toString()).join(', ')}], publicDataWrites: [${this.publicDataWrites.map(h => h.toString()).join(', ')}], + noteEncryptedLogs: ${JSON.stringify(this.noteEncryptedLogs.toJSON())}, encryptedLogs: ${JSON.stringify(this.encryptedLogs.toJSON())}, unencryptedLogs: ${JSON.stringify(this.unencryptedLogs.toJSON())} }`; diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts index b9ea7e72325..27a6f736bd6 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts @@ -36,6 +36,19 @@ export class Grumpkin { return Point.fromBuffer(Buffer.from(this.wasm.getMemorySlice(96, 160))); } + /** + * Add two points. + * @param a - Point a in the addition + * @param b - Point b to add to a + * @returns Result of the addition. + */ + public add(a: Point, b: Point): Point { + this.wasm.writeMemory(0, a.toBuffer()); + this.wasm.writeMemory(64, b.toBuffer()); + this.wasm.call('ecc_grumpkin__add', 0, 64, 128); + return Point.fromBuffer(Buffer.from(this.wasm.getMemorySlice(128, 192))); + } + /** * Multiplies a set of points by a scalar. * @param points - Points to multiply. diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index ecf31fe3334..630f632bc1f 100644 --- a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -9,9 +9,10 @@ export const MAX_NEW_L2_TO_L1_MSGS_PER_CALL = 2; export const MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL = 16; export const MAX_PUBLIC_DATA_READS_PER_CALL = 16; export const MAX_NOTE_HASH_READ_REQUESTS_PER_CALL = 32; -export const MAX_NULLIFIER_READ_REQUESTS_PER_CALL = 2; -export const MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL = 2; -export const MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL = 1; +export const MAX_NULLIFIER_READ_REQUESTS_PER_CALL = 32; +export const MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL = 32; +export const MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL = 16; +export const MAX_NOTE_ENCRYPTED_LOGS_PER_CALL = 16; export const MAX_ENCRYPTED_LOGS_PER_CALL = 4; export const MAX_UNENCRYPTED_LOGS_PER_CALL = 4; export const MAX_NEW_NOTE_HASHES_PER_TX = 64; @@ -22,9 +23,10 @@ export const MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX = 32; export const MAX_PUBLIC_DATA_READS_PER_TX = 32; export const MAX_NEW_L2_TO_L1_MSGS_PER_TX = 2; export const MAX_NOTE_HASH_READ_REQUESTS_PER_TX = 128; -export const MAX_NULLIFIER_READ_REQUESTS_PER_TX = 8; -export const MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX = 8; -export const MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX = 4; +export const MAX_NULLIFIER_READ_REQUESTS_PER_TX = 128; +export const MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX = 128; +export const MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX = 64; +export const MAX_NOTE_ENCRYPTED_LOGS_PER_TX = 64; export const MAX_ENCRYPTED_LOGS_PER_TX = 8; export const MAX_UNENCRYPTED_LOGS_PER_TX = 8; export const NUM_ENCRYPTED_LOGS_HASHES_PER_TX = 1; @@ -60,8 +62,7 @@ export const MAX_ARGS_LENGTH = ARGS_HASH_CHUNK_COUNT * ARGS_HASH_CHUNK_LENGTH; export const INITIALIZATION_SLOT_SEPARATOR = 1000_000_000; export const INITIAL_L2_BLOCK_NUM = 1; export const BLOB_SIZE_IN_BYTES = 31 * 4096; -export const NESTED_CALL_L2_GAS_BUFFER = 20000; -export const MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS = 32000; +export const 
MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS = 20000; export const MAX_PACKED_BYTECODE_SIZE_PER_PRIVATE_FUNCTION_IN_FIELDS = 3000; export const MAX_PACKED_BYTECODE_SIZE_PER_UNCONSTRAINED_FUNCTION_IN_FIELDS = 3000; export const REGISTERER_PRIVATE_FUNCTION_BROADCASTED_ADDITIONAL_FIELDS = 19; @@ -105,6 +106,8 @@ export const NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = 3; export const SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH + 1; export const PARTIAL_STATE_REFERENCE_LENGTH = 6; export const READ_REQUEST_LENGTH = 2; +export const LOG_HASH_LENGTH = 3; +export const NOTE_LOG_HASH_LENGTH = 4; export const NOTE_HASH_LENGTH = 2; export const SCOPED_NOTE_HASH_LENGTH = NOTE_HASH_LENGTH + 2; export const NULLIFIER_LENGTH = 3; @@ -130,8 +133,9 @@ export const PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH = 1 + L2_TO_L1_MESSAGE_LENGTH * MAX_NEW_L2_TO_L1_MSGS_PER_CALL + 2 + - SIDE_EFFECT_LENGTH * MAX_ENCRYPTED_LOGS_PER_CALL + - SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL + + NOTE_LOG_HASH_LENGTH * MAX_NOTE_ENCRYPTED_LOGS_PER_CALL + + LOG_HASH_LENGTH * MAX_ENCRYPTED_LOGS_PER_CALL + + LOG_HASH_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL + 2 + HEADER_LENGTH + TX_CONTEXT_LENGTH; @@ -147,7 +151,7 @@ export const PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH = NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_CALL + L2_TO_L1_MESSAGE_LENGTH * MAX_NEW_L2_TO_L1_MSGS_PER_CALL + 2 + - SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL + + LOG_HASH_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL + 1 + HEADER_LENGTH + GLOBAL_VARIABLES_LENGTH + @@ -173,7 +177,7 @@ export const COMBINED_ACCUMULATED_DATA_LENGTH = MAX_NEW_NOTE_HASHES_PER_TX + MAX_NEW_NULLIFIERS_PER_TX + MAX_NEW_L2_TO_L1_MSGS_PER_TX + - 4 + + 5 + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * PUBLIC_DATA_UPDATE_REQUEST_LENGTH + GAS_LENGTH; export const COMBINED_CONSTANT_DATA_LENGTH = HEADER_LENGTH + TX_CONTEXT_LENGTH + GLOBAL_VARIABLES_LENGTH; @@ -183,8 +187,9 @@ export const PRIVATE_ACCUMULATED_DATA_LENGTH = SCOPED_NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_TX + SCOPED_NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_TX + MAX_NEW_L2_TO_L1_MSGS_PER_TX * SCOPED_L2_TO_L1_MESSAGE_LENGTH + - SIDE_EFFECT_LENGTH * MAX_ENCRYPTED_LOGS_PER_TX + - SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_TX + + NOTE_LOG_HASH_LENGTH * MAX_NOTE_ENCRYPTED_LOGS_PER_TX + + LOG_HASH_LENGTH * MAX_ENCRYPTED_LOGS_PER_TX + + LOG_HASH_LENGTH * MAX_UNENCRYPTED_LOGS_PER_TX + 2 + CALL_REQUEST_LENGTH * MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX + CALL_REQUEST_LENGTH * MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX; @@ -199,8 +204,9 @@ export const PUBLIC_ACCUMULATED_DATA_LENGTH = MAX_NEW_NOTE_HASHES_PER_TX * NOTE_HASH_LENGTH + MAX_NEW_NULLIFIERS_PER_TX * NULLIFIER_LENGTH + MAX_NEW_L2_TO_L1_MSGS_PER_TX * 1 + - MAX_ENCRYPTED_LOGS_PER_TX * SIDE_EFFECT_LENGTH + - MAX_UNENCRYPTED_LOGS_PER_TX * SIDE_EFFECT_LENGTH + + NOTE_LOG_HASH_LENGTH * MAX_NOTE_ENCRYPTED_LOGS_PER_TX + + MAX_ENCRYPTED_LOGS_PER_TX * LOG_HASH_LENGTH + + MAX_UNENCRYPTED_LOGS_PER_TX * LOG_HASH_LENGTH + 2 + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * PUBLIC_DATA_UPDATE_REQUEST_LENGTH + MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX * CALL_REQUEST_LENGTH + diff --git a/yarn-project/circuits.js/src/contract/__snapshots__/contract_address.test.ts.snap b/yarn-project/circuits.js/src/contract/__snapshots__/contract_address.test.ts.snap index 37d75fc64af..2d413b4089c 100644 --- a/yarn-project/circuits.js/src/contract/__snapshots__/contract_address.test.ts.snap +++ b/yarn-project/circuits.js/src/contract/__snapshots__/contract_address.test.ts.snap @@ -1,7 +1,5 @@ // Jest 
Snapshot v1, https://goo.gl/fbAQLP -exports[`ContractAddress Public key hash matches Noir 1`] = `"0x22d83a089d7650514c2de24cd30185a414d943eaa19817c67bffe2c3183006a3"`; - exports[`ContractAddress computeContractAddressFromInstance 1`] = `"0x0bed63221d281713007bfb0c063e1f61d0646404fb3701b99bb92f41b6390604"`; exports[`ContractAddress computeInitializationHash 1`] = `Fr<0x109865e4b959adba34b722e72a69baaf9ee78e31bb1042318f0d91006ed86780>`; diff --git a/yarn-project/circuits.js/src/contract/contract_address.test.ts b/yarn-project/circuits.js/src/contract/contract_address.test.ts index c83f7fdd928..e81eaebfc49 100644 --- a/yarn-project/circuits.js/src/contract/contract_address.test.ts +++ b/yarn-project/circuits.js/src/contract/contract_address.test.ts @@ -1,6 +1,6 @@ import { ABIParameterVisibility, type FunctionAbi, FunctionType } from '@aztec/foundation/abi'; import { Fr } from '@aztec/foundation/fields'; -import { setupCustomSnapshotSerializers, updateInlineTestData } from '@aztec/foundation/testing'; +import { setupCustomSnapshotSerializers } from '@aztec/foundation/testing'; import { AztecAddress, deriveKeys } from '../index.js'; import { @@ -69,17 +69,4 @@ describe('ContractAddress', () => { expect(address).toMatchSnapshot(); }); - - it('Public key hash matches Noir', () => { - const secretKey = new Fr(2n); - const hash = deriveKeys(secretKey).publicKeys.hash().toString(); - expect(hash).toMatchSnapshot(); - - // Run with AZTEC_GENERATE_TEST_DATA=1 to update noir test data - updateInlineTestData( - 'noir-projects/noir-protocol-circuits/crates/types/src/address/public_keys_hash.nr', - 'expected_public_keys_hash', - hash.toString(), - ); - }); }); diff --git a/yarn-project/circuits.js/src/hints/build_transient_data_hints.test.ts b/yarn-project/circuits.js/src/hints/build_transient_data_hints.test.ts index 783ea56c885..d72783914be 100644 --- a/yarn-project/circuits.js/src/hints/build_transient_data_hints.test.ts +++ b/yarn-project/circuits.js/src/hints/build_transient_data_hints.test.ts @@ -1,4 +1,12 @@ -import { AztecAddress, Fr, NoteHash, Nullifier, type ScopedNoteHash, type ScopedNullifier } from '@aztec/circuits.js'; +import { + AztecAddress, + Fr, + NoteHash, + NoteLogHash, + Nullifier, + type ScopedNoteHash, + type ScopedNullifier, +} from '@aztec/circuits.js'; import { buildTransientDataHints } from './build_transient_data_hints.js'; @@ -7,6 +15,7 @@ describe('buildTransientDataHints', () => { let noteHashes: ScopedNoteHash[]; let nullifiers: ScopedNullifier[]; + let logs: NoteLogHash[]; beforeEach(() => { noteHashes = [ @@ -20,27 +29,38 @@ describe('buildTransientDataHints', () => { new Nullifier(new Fr(66), 600, new Fr(0)).scope(contractAddress), new Nullifier(new Fr(77), 700, new Fr(11)).scope(contractAddress), ]; + logs = [ + new NoteLogHash(new Fr(88), 350, new Fr(64), 300), + new NoteLogHash(new Fr(99), 375, new Fr(64), 300), + new NoteLogHash(new Fr(111), 150, new Fr(64), 100), + new NoteLogHash(new Fr(122), 250, new Fr(64), 200), + ]; }); it('builds index hints that link transient note hashes and nullifiers', () => { - const [nullifierIndexes, noteHashIndexes] = buildTransientDataHints(noteHashes, nullifiers); + const [nullifierIndexes, noteHashIndexesForNullifiers, noteHashIndexesForLogs] = buildTransientDataHints( + noteHashes, + nullifiers, + logs, + ); expect(nullifierIndexes).toEqual([3, 4, 1]); - expect(noteHashIndexes).toEqual([3, 2, 3, 0]); + expect(noteHashIndexesForNullifiers).toEqual([3, 2, 3, 0]); + expect(noteHashIndexesForLogs).toEqual([2, 2, 0, 3]); }); 
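To see where the `[2, 2, 0, 3]` expectation above comes from: each log carries the counter of the note hash it belongs to, and the hint builder points it back at that note hash's index, leaving unmatched logs at the out-of-range default. A condensed model of just that pass (it ignores the real function's restriction to transient, i.e. nullified, note hashes):

```typescript
// noteHashCounters[i] is the counter of note hash i; logNoteHashCounters[j]
// is the noteHashCounter recorded on log j. Unmatched logs keep the default
// hint equal to noteHashCounters.length, matching makeTuple's initializer.
function logHintsModel(noteHashCounters: number[], logNoteHashCounters: number[]): number[] {
  const hints = logNoteHashCounters.map(() => noteHashCounters.length);
  noteHashCounters.forEach((counter, noteHashIndex) => {
    logNoteHashCounters.forEach((logCounter, logIndex) => {
      if (logCounter === counter) {
        hints[logIndex] = noteHashIndex;
      }
    });
  });
  return hints;
}
```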
it('throws if no matching nullifier', () => { noteHashes[0].nullifierCounter = 450; - expect(() => buildTransientDataHints(noteHashes, nullifiers)).toThrow('Unknown nullifier counter.'); + expect(() => buildTransientDataHints(noteHashes, nullifiers, logs)).toThrow('Unknown nullifier counter.'); }); it('throws if note hash does not match', () => { nullifiers[1].nullifier.noteHash = new Fr(11); - expect(() => buildTransientDataHints(noteHashes, nullifiers)).toThrow('Hinted note hash does not match.'); + expect(() => buildTransientDataHints(noteHashes, nullifiers, logs)).toThrow('Hinted note hash does not match.'); }); it('throws if contract address does not match', () => { nullifiers[1].contractAddress = AztecAddress.fromBigInt(123456n); - expect(() => buildTransientDataHints(noteHashes, nullifiers)).toThrow( + expect(() => buildTransientDataHints(noteHashes, nullifiers, logs)).toThrow( 'Contract address of hinted note hash does not match.', ); }); diff --git a/yarn-project/circuits.js/src/hints/build_transient_data_hints.ts b/yarn-project/circuits.js/src/hints/build_transient_data_hints.ts index a9664d6e5ce..9eb26f51a4c 100644 --- a/yarn-project/circuits.js/src/hints/build_transient_data_hints.ts +++ b/yarn-project/circuits.js/src/hints/build_transient_data_hints.ts @@ -1,16 +1,27 @@ -import { type ScopedNoteHash, type ScopedNullifier, countAccumulatedItems } from '@aztec/circuits.js'; +import { type NoteLogHash, type ScopedNoteHash, type ScopedNullifier, countAccumulatedItems } from '@aztec/circuits.js'; import { makeTuple } from '@aztec/foundation/array'; import { type Tuple } from '@aztec/foundation/serialize'; -export function buildTransientDataHints( +export function buildTransientDataHints< + NOTE_HASHES_LEN extends number, + NULLIFIERS_LEN extends number, + LOGS_LEN extends number, +>( noteHashes: Tuple, nullifiers: Tuple, + noteLogs: Tuple, noteHashesLength: NOTE_HASHES_LEN = noteHashes.length as NOTE_HASHES_LEN, nullifiersLength: NULLIFIERS_LEN = nullifiers.length as NULLIFIERS_LEN, -): [Tuple, Tuple] { + logsLength: LOGS_LEN = noteLogs.length as LOGS_LEN, +): [Tuple, Tuple, Tuple] { const nullifierIndexMap: Map = new Map(); nullifiers.forEach((n, i) => nullifierIndexMap.set(n.counter, i)); + const logNoteHashMap: Map = new Map(); + noteLogs.forEach((n, i) => { + logNoteHashMap.set(n.noteHashCounter, (logNoteHashMap.get(n.noteHashCounter) || []).concat([i])); + }); + const nullifierIndexesForNoteHashes: Tuple = makeTuple( noteHashesLength, () => nullifiersLength, @@ -21,6 +32,8 @@ export function buildTransientDataHints noteHashesLength, ); + const noteHashIndexesForLogs: Tuple = makeTuple(logsLength, () => noteHashesLength); + const numNoteHashes = countAccumulatedItems(noteHashes); for (let i = 0; i < numNoteHashes; i++) { const noteHash = noteHashes[i]; @@ -38,10 +51,17 @@ export function buildTransientDataHints { + noteHashIndexesForLogs[logIndex] = i; + }); + } + nullifierIndexesForNoteHashes[i] = nullifierIndex; noteHashIndexesForNullifiers[nullifierIndex] = i; } } - return [nullifierIndexesForNoteHashes, noteHashIndexesForNullifiers]; + return [nullifierIndexesForNoteHashes, noteHashIndexesForNullifiers, noteHashIndexesForLogs]; } diff --git a/yarn-project/circuits.js/src/keys/index.test.ts b/yarn-project/circuits.js/src/keys/index.test.ts index 5f1d70705d2..de6d68d1610 100644 --- a/yarn-project/circuits.js/src/keys/index.test.ts +++ b/yarn-project/circuits.js/src/keys/index.test.ts @@ -23,7 +23,7 @@ describe('🔑', () => { // Run with AZTEC_GENERATE_TEST_DATA=1 to update noir 
test data updateInlineTestData( - 'noir-projects/noir-protocol-circuits/crates/types/src/address/public_keys_hash.nr', + 'noir-projects/aztec-nr/aztec/src/keys/public_keys.nr', 'expected_public_keys_hash', expected.toString(), ); diff --git a/yarn-project/circuits.js/src/keys/index.ts b/yarn-project/circuits.js/src/keys/index.ts index 3de9716a203..2dc73210e08 100644 --- a/yarn-project/circuits.js/src/keys/index.ts +++ b/yarn-project/circuits.js/src/keys/index.ts @@ -1,16 +1,36 @@ import { AztecAddress } from '@aztec/foundation/aztec-address'; import { poseidon2Hash, sha512ToGrumpkinScalar } from '@aztec/foundation/crypto'; -import { type Fr, type GrumpkinScalar } from '@aztec/foundation/fields'; +import { Fq, type Fr, type GrumpkinScalar } from '@aztec/foundation/fields'; import { Grumpkin } from '../barretenberg/crypto/grumpkin/index.js'; import { GeneratorIndex } from '../constants.gen.js'; -import { type GrumpkinPrivateKey } from '../types/grumpkin_private_key.js'; +import { GrumpkinPrivateKey } from '../types/grumpkin_private_key.js'; +import { type PublicKey } from '../types/public_key.js'; import { PublicKeys } from '../types/public_keys.js'; +const curve = new Grumpkin(); + export function computeAppNullifierSecretKey(masterNullifierSecretKey: GrumpkinPrivateKey, app: AztecAddress): Fr { return poseidon2Hash([masterNullifierSecretKey.high, masterNullifierSecretKey.low, app, GeneratorIndex.NSK_M]); } +export function computeIvpkApp(ivpk: PublicKey, address: AztecAddress) { + const I = Fq.fromBuffer(poseidon2Hash([address.toField(), ivpk.x, ivpk.y, GeneratorIndex.IVSK_M]).toBuffer()); + return curve.add(curve.mul(Grumpkin.generator, I), ivpk); +} + +export function computeIvskApp(ivsk: GrumpkinPrivateKey, address: AztecAddress) { + const ivpk = curve.mul(Grumpkin.generator, ivsk); + const I = Fq.fromBuffer(poseidon2Hash([address.toField(), ivpk.x, ivpk.y, GeneratorIndex.IVSK_M]).toBuffer()); + return new Fq((I.toBigInt() + ivsk.toBigInt()) % Fq.MODULUS); +} + +export function computeOvskApp(ovsk: GrumpkinPrivateKey, address: AztecAddress) { + return GrumpkinPrivateKey.fromBuffer( + poseidon2Hash([address.toField(), ovsk.high, ovsk.low, GeneratorIndex.OVSK_M]).toBuffer(), + ); +} + export function deriveMasterNullifierSecretKey(secretKey: Fr): GrumpkinScalar { return sha512ToGrumpkinScalar([secretKey, GeneratorIndex.NSK_M]); } @@ -29,13 +49,17 @@ export function computeAddress(publicKeysHash: Fr, partialAddress: Fr) { return AztecAddress.fromField(addressFr); } +export function derivePublicKeyFromSecretKey(secretKey: Fq) { + const curve = new Grumpkin(); + return curve.mul(curve.generator(), secretKey); +} + /** * Computes secret and public keys and public keys hash from a secret key. * @param secretKey - The secret key to derive keys from. * @returns The derived keys. 
*/ export function deriveKeys(secretKey: Fr) { - const curve = new Grumpkin(); // First we derive master secret keys - we use sha512 here because this derivation will never take place // in a circuit const masterNullifierSecretKey = deriveMasterNullifierSecretKey(secretKey); @@ -44,11 +68,17 @@ export function deriveKeys(secretKey: Fr) { const masterTaggingSecretKey = sha512ToGrumpkinScalar([secretKey, GeneratorIndex.TSK_M]); // Then we derive master public keys + const masterNullifierPublicKey = derivePublicKeyFromSecretKey(masterNullifierSecretKey); + const masterIncomingViewingPublicKey = derivePublicKeyFromSecretKey(masterIncomingViewingSecretKey); + const masterOutgoingViewingPublicKey = derivePublicKeyFromSecretKey(masterOutgoingViewingSecretKey); + const masterTaggingPublicKey = derivePublicKeyFromSecretKey(masterTaggingSecretKey); + + // We hash the public keys to get the public keys hash const publicKeys = new PublicKeys( - curve.mul(curve.generator(), masterNullifierSecretKey), - curve.mul(curve.generator(), masterIncomingViewingSecretKey), - curve.mul(curve.generator(), masterOutgoingViewingSecretKey), - curve.mul(curve.generator(), masterTaggingSecretKey), + masterNullifierPublicKey, + masterIncomingViewingPublicKey, + masterOutgoingViewingPublicKey, + masterTaggingPublicKey, ); return { diff --git a/yarn-project/circuits.js/src/structs/__snapshots__/private_call_stack_item.test.ts.snap b/yarn-project/circuits.js/src/structs/__snapshots__/private_call_stack_item.test.ts.snap index 2de7a0cede8..1e2c25066fc 100644 --- a/yarn-project/circuits.js/src/structs/__snapshots__/private_call_stack_item.test.ts.snap +++ b/yarn-project/circuits.js/src/structs/__snapshots__/private_call_stack_item.test.ts.snap @@ -1,5 +1,5 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`PrivateCallStackItem computes empty item hash 1`] = `Fr<0x138c6ad441864ce43487e99d5e1e122c38b4b55d893edec04a32f5aacecc856c>`; +exports[`PrivateCallStackItem computes empty item hash 1`] = `Fr<0x11e550264f1840bab424389aa41ed8a5735c0aa8f94e41bd259caab964ff93dc>`; -exports[`PrivateCallStackItem computes hash 1`] = `Fr<0x2078c0fe8fa7dc6d0c4623ec068d3297e027e60131ff4b0e333a99f72503aa32>`; +exports[`PrivateCallStackItem computes hash 1`] = `Fr<0x17b804ce2f015fe4761c3f524103213bdc3033a2b226f5bf33637b0e7650b8ff>`; diff --git a/yarn-project/circuits.js/src/structs/__snapshots__/private_circuit_public_inputs.test.ts.snap b/yarn-project/circuits.js/src/structs/__snapshots__/private_circuit_public_inputs.test.ts.snap index 4ace7377315..e53a63c1e44 100644 --- a/yarn-project/circuits.js/src/structs/__snapshots__/private_circuit_public_inputs.test.ts.snap +++ b/yarn-project/circuits.js/src/structs/__snapshots__/private_circuit_public_inputs.test.ts.snap @@ -1,5 +1,5 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`PrivateCircuitPublicInputs computes empty inputs hash 1`] = `Fr<0x2517b9a84487bde68e18647e59530c6ffe4a7a88c5c556f013d09fd22b84ba35>`; +exports[`PrivateCircuitPublicInputs computes empty inputs hash 1`] = `Fr<0x067b9bd773ae49145e07b395da4f156fb35972e77bd4c40ed980ea8c9b90dd64>`; -exports[`PrivateCircuitPublicInputs hash matches snapshot 1`] = `Fr<0x0e570673c6fee73b2c55d8acff12bdd9084820e6448c32cfb2600847f493bec1>`; +exports[`PrivateCircuitPublicInputs hash matches snapshot 1`] = `Fr<0x024619cfa41bf4ca35cec15ee5c7b90183f44711737bea9aef56c40602765dbb>`; diff --git a/yarn-project/circuits.js/src/structs/__snapshots__/public_call_stack_item.test.ts.snap 
b/yarn-project/circuits.js/src/structs/__snapshots__/public_call_stack_item.test.ts.snap index 0ccb386246e..ff438f9222d 100644 --- a/yarn-project/circuits.js/src/structs/__snapshots__/public_call_stack_item.test.ts.snap +++ b/yarn-project/circuits.js/src/structs/__snapshots__/public_call_stack_item.test.ts.snap @@ -1,9 +1,9 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`PublicCallStackItem Computes a callstack item hash 1`] = `"0x0f7624c0d5ea65fcec318c4d34cb3fcbf9c67435aebbf1548b3c90ef641424f8"`; +exports[`PublicCallStackItem Computes a callstack item hash 1`] = `"0x2b7f8b68d96d0011ecc576459899e9451fbd880568ccc7a071d9cf04e59abb65"`; -exports[`PublicCallStackItem Computes a callstack item request hash 1`] = `"0x1177a69fbc37f0ebdf290025414ff72504497840f174896bd427d0f30ec21c55"`; +exports[`PublicCallStackItem Computes a callstack item request hash 1`] = `"0x11998b1d33b8ba1c8fa7a6c2f5bc76b31bbaa80400554465c335ba31559ac1f9"`; -exports[`PublicCallStackItem computes empty item hash 1`] = `Fr<0x020b98dcc882881a349edfd43044d58c8703fdcfc9d4b250b799d951608dcd6b>`; +exports[`PublicCallStackItem computes empty item hash 1`] = `Fr<0x2e7cef26b4ef88a036f6f2bc5bd5d7457b7c2851c7357f1e6f79be9fdde4cf77>`; -exports[`PublicCallStackItem computes hash 1`] = `Fr<0x18d2b726728360b534121bb15accd1059f7df38225e76768e64d3e3040122440>`; +exports[`PublicCallStackItem computes hash 1`] = `Fr<0x025cc2b4f4105c6f39113d8544224901fbc23fde4d4c958d6c41907078b678bb>`; diff --git a/yarn-project/circuits.js/src/structs/__snapshots__/public_circuit_public_inputs.test.ts.snap b/yarn-project/circuits.js/src/structs/__snapshots__/public_circuit_public_inputs.test.ts.snap index 834668caf75..f7eeb88b15c 100644 --- a/yarn-project/circuits.js/src/structs/__snapshots__/public_circuit_public_inputs.test.ts.snap +++ b/yarn-project/circuits.js/src/structs/__snapshots__/public_circuit_public_inputs.test.ts.snap @@ -1,5 +1,5 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`PublicCircuitPublicInputs computes empty inputs hash 1`] = `Fr<0x132559f41b7adc7388e0cd52b91fd6837c296b2f9ec1b6d2ed046f7a56db18f8>`; +exports[`PublicCircuitPublicInputs computes empty inputs hash 1`] = `Fr<0x1e4351db0c9aa20836e7009bc3e6a4555c92622c5e9cb3b49e2ec0fbbf59d0bd>`; -exports[`PublicCircuitPublicInputs hash matches snapshot 1`] = `Fr<0x0ac3cb8eb6605fc7aa83e9420eb988c1f6c9a5dcc2457c133216624bc6932619>`; +exports[`PublicCircuitPublicInputs hash matches snapshot 1`] = `Fr<0x07eb8710b05470576d027ed77139ac6755324fb1a67da6c7669e837c4eab7dc2>`; diff --git a/yarn-project/circuits.js/src/structs/complete_address.test.ts b/yarn-project/circuits.js/src/structs/complete_address.test.ts index 64bed5dbced..25c0de180c8 100644 --- a/yarn-project/circuits.js/src/structs/complete_address.test.ts +++ b/yarn-project/circuits.js/src/structs/complete_address.test.ts @@ -33,4 +33,38 @@ describe('CompleteAddress', () => { const address = CompleteAddress.fromBuffer(expectedAddress.toBuffer()); expect(address.equals(expectedAddress)).toBe(true); }); + + it('instantiates from string and individual components', () => { + // docs:start:instantiate-complete-address + // Typically a recipient would share their complete address with the sender + const completeAddressFromString = CompleteAddress.fromString( + 
'0x09bc7031bb21627cce6aac1dc710ecc92acd8475149c530a4bb57df63d9d6fe902a9372135ce5b49b46102732fabd742c31642543396013dde5b460075864607264c605bc115c6cb92a4db0a6b893fd3777341078693d0af22e3ff53f4c2ee2a2fae73914fc50d325e2707a8e996f1ad498429f715f998225dc6bd2ede05aaee055ee137d28b634322e0ea98afc42dfc48833e8d2879c34d23d6d1d337069cca212af0f28b7865b339e202a0077fd3bd8dddc472d055945ad99c02dcccd28bb22bb3585fca3e5751c9913521a390458d63e4d9b292e4872582f3b13da214470c14083a4567cf4f1e92696e6c01923bc6a8b414159446268b12fe8669ce44f1f5196561aca6c654d2405a5653002cba5552b50b6ce1afc9515ed6682507abcb3010040d791aeb30138efc9c7d36b47684af2f26f686672448349f05934ae7bbbf', + ); + + // Alternatively, a recipient could share the individual components with the sender + const address = Fr.fromString('0x09bc7031bb21627cce6aac1dc710ecc92acd8475149c530a4bb57df63d9d6fe9'); + const npkM = Point.fromString( + '0x02a9372135ce5b49b46102732fabd742c31642543396013dde5b460075864607264c605bc115c6cb92a4db0a6b893fd3777341078693d0af22e3ff53f4c2ee2a', + ); + const ivpkM = Point.fromString( + '0x2fae73914fc50d325e2707a8e996f1ad498429f715f998225dc6bd2ede05aaee055ee137d28b634322e0ea98afc42dfc48833e8d2879c34d23d6d1d337069cca', + ); + const ovpkM = Point.fromString( + '0x212af0f28b7865b339e202a0077fd3bd8dddc472d055945ad99c02dcccd28bb22bb3585fca3e5751c9913521a390458d63e4d9b292e4872582f3b13da214470c', + ); + const tpkM = Point.fromString( + '0x14083a4567cf4f1e92696e6c01923bc6a8b414159446268b12fe8669ce44f1f5196561aca6c654d2405a5653002cba5552b50b6ce1afc9515ed6682507abcb30', + ); + + const partialAddress = Fr.fromString('0x10040d791aeb30138efc9c7d36b47684af2f26f686672448349f05934ae7bbbf'); + + const completeAddressFromComponents = new CompleteAddress( + address, + new PublicKeys(npkM, ivpkM, ovpkM, tpkM), + partialAddress, + ); + // docs:end:instantiate-complete-address + + expect(completeAddressFromComponents.equals(completeAddressFromString)).toBe(true); + }); }); diff --git a/yarn-project/circuits.js/src/structs/index.ts b/yarn-project/circuits.js/src/structs/index.ts index 138baa2d6c9..8be49f1d142 100644 --- a/yarn-project/circuits.js/src/structs/index.ts +++ b/yarn-project/circuits.js/src/structs/index.ts @@ -34,6 +34,7 @@ export * from './kernel/public_kernel_tail_circuit_private_inputs.js'; export * from './kernel/kernel_circuit_public_inputs.js'; export * from './kernel/kernel_data.js'; export * from './l2_to_l1_message.js'; +export * from './log_hash.js'; export * from './max_block_number.js'; export * from './membership_witness.js'; export * from './non_existent_read_request_hints.js'; diff --git a/yarn-project/circuits.js/src/structs/kernel/combined_accumulated_data.ts b/yarn-project/circuits.js/src/structs/kernel/combined_accumulated_data.ts index ba1343f86a2..19f1460f973 100644 --- a/yarn-project/circuits.js/src/structs/kernel/combined_accumulated_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/combined_accumulated_data.ts @@ -31,6 +31,11 @@ export class CombinedAccumulatedData { * All the new L2 to L1 messages created in this transaction. */ public newL2ToL1Msgs: Tuple, + /** + * Accumulated encrypted note logs hash from all the previous kernel iterations. + * Note: Truncated to 31 bytes to fit in Fr. + */ + public noteEncryptedLogsHash: Fr, /** * Accumulated encrypted logs hash from all the previous kernel iterations. * Note: Truncated to 31 bytes to fit in Fr. 
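// Illustrative sketch (not part of the patch): the new LOG_HASH_LENGTH = 3 and
// NOTE_LOG_HASH_LENGTH = 4 constants near the top of this diff mirror the field
// layouts of the LogHash and NoteLogHash structs added in log_hash.ts further
// below. This assumes both classes are re-exported from '@aztec/circuits.js',
// as the structs/index.ts change above suggests.
import { Fr } from '@aztec/foundation/fields';
import { LogHash, NoteLogHash } from '@aztec/circuits.js';

const logHash = new LogHash(new Fr(1n), 2, new Fr(3n)); // [value, counter, length]
const noteLogHash = new NoteLogHash(new Fr(1n), 2, new Fr(3n), 4); // ...plus noteHashCounter

console.assert(logHash.toFields().length === 3); // LOG_HASH_LENGTH
console.assert(noteLogHash.toFields().length === 4); // NOTE_LOG_HASH_LENGTH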
@@ -63,6 +68,7 @@ export class CombinedAccumulatedData { this.newNoteHashes, this.newNullifiers, this.newL2ToL1Msgs, + this.noteEncryptedLogsHash, this.encryptedLogsHash, this.unencryptedLogsHash, this.encryptedLogPreimagesLength, @@ -91,6 +97,7 @@ export class CombinedAccumulatedData { Fr.fromBuffer(reader), Fr.fromBuffer(reader), Fr.fromBuffer(reader), + Fr.fromBuffer(reader), reader.readArray(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, PublicDataUpdateRequest), reader.readObject(Gas), ); @@ -114,6 +121,7 @@ export class CombinedAccumulatedData { Fr.zero(), Fr.zero(), Fr.zero(), + Fr.zero(), makeTuple(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, PublicDataUpdateRequest.empty), Gas.empty(), ); @@ -133,6 +141,7 @@ export class CombinedAccumulatedData { .filter(x => !x.isZero()) .map(x => inspect(x)) .join(', ')}], + noteEncryptedLogsHash: ${this.noteEncryptedLogsHash.toString()}, encryptedLogsHash: ${this.encryptedLogsHash.toString()}, unencryptedLogsHash: ${this.unencryptedLogsHash.toString()}, encryptedLogPreimagesLength: ${this.encryptedLogPreimagesLength.toString()}, diff --git a/yarn-project/circuits.js/src/structs/kernel/private_accumulated_data.ts b/yarn-project/circuits.js/src/structs/kernel/private_accumulated_data.ts index 3d0029b199b..0c83610688a 100644 --- a/yarn-project/circuits.js/src/structs/kernel/private_accumulated_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/private_accumulated_data.ts @@ -4,19 +4,19 @@ import { BufferReader, type Tuple, serializeToBuffer } from '@aztec/foundation/s import { MAX_ENCRYPTED_LOGS_PER_TX, - type MAX_NEW_L2_TO_L1_MSGS_PER_CALL, MAX_NEW_L2_TO_L1_MSGS_PER_TX, MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, + MAX_NOTE_ENCRYPTED_LOGS_PER_TX, MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX, MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX, } from '../../constants.gen.js'; import { CallRequest } from '../call_request.js'; import { ScopedL2ToL1Message } from '../l2_to_l1_message.js'; +import { LogHash, NoteLogHash } from '../log_hash.js'; import { ScopedNoteHash } from '../note_hash.js'; import { ScopedNullifier } from '../nullifier.js'; -import { SideEffect } from '../side_effects.js'; /** * Specific accumulated data structure for the final ordering private kernel circuit. It is included @@ -35,17 +35,22 @@ export class PrivateAccumulatedData { /** * All the new L2 to L1 messages created in this transaction. */ - public newL2ToL1Msgs: Tuple, + public newL2ToL1Msgs: Tuple, /** - * Accumulated encrypted logs hash from all the previous kernel iterations. - * Note: Represented as a tuple of 2 fields in order to fit in all of the 256 bits of sha256 hash. + * Accumulated encrypted note logs hashes from all the previous kernel iterations. + * Note: Truncated to 31 bytes to fit in Fr. */ - public encryptedLogsHashes: Tuple, + public noteEncryptedLogsHashes: Tuple, /** - * Accumulated unencrypted logs hash from all the previous kernel iterations. - * Note: Represented as a tuple of 2 fields in order to fit in all of the 256 bits of sha256 hash. + * Accumulated encrypted logs hashes from all the previous kernel iterations. + * Note: Truncated to 31 bytes to fit in Fr. */ - public unencryptedLogsHashes: Tuple, + public encryptedLogsHashes: Tuple, + /** + * Accumulated unencrypted logs hashes from all the previous kernel iterations. + * Note: Truncated to 31 bytes to fit in Fr. 
+ */ + public unencryptedLogsHashes: Tuple, /** * Total accumulated length of the encrypted log preimages emitted in all the previous kernel iterations */ @@ -70,6 +75,7 @@ export class PrivateAccumulatedData { this.newNoteHashes, this.newNullifiers, this.newL2ToL1Msgs, + this.noteEncryptedLogsHashes, this.encryptedLogsHashes, this.unencryptedLogsHashes, this.encryptedLogPreimagesLength, @@ -94,8 +100,9 @@ export class PrivateAccumulatedData { reader.readArray(MAX_NEW_NOTE_HASHES_PER_TX, ScopedNoteHash), reader.readArray(MAX_NEW_NULLIFIERS_PER_TX, ScopedNullifier), reader.readArray(MAX_NEW_L2_TO_L1_MSGS_PER_TX, ScopedL2ToL1Message), - reader.readArray(MAX_ENCRYPTED_LOGS_PER_TX, SideEffect), - reader.readArray(MAX_UNENCRYPTED_LOGS_PER_TX, SideEffect), + reader.readArray(MAX_NOTE_ENCRYPTED_LOGS_PER_TX, NoteLogHash), + reader.readArray(MAX_ENCRYPTED_LOGS_PER_TX, LogHash), + reader.readArray(MAX_UNENCRYPTED_LOGS_PER_TX, LogHash), Fr.fromBuffer(reader), Fr.fromBuffer(reader), reader.readArray(MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX, CallRequest), @@ -117,8 +124,9 @@ export class PrivateAccumulatedData { makeTuple(MAX_NEW_NOTE_HASHES_PER_TX, ScopedNoteHash.empty), makeTuple(MAX_NEW_NULLIFIERS_PER_TX, ScopedNullifier.empty), makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_TX, ScopedL2ToL1Message.empty), - makeTuple(MAX_ENCRYPTED_LOGS_PER_TX, SideEffect.empty), - makeTuple(MAX_UNENCRYPTED_LOGS_PER_TX, SideEffect.empty), + makeTuple(MAX_NOTE_ENCRYPTED_LOGS_PER_TX, NoteLogHash.empty), + makeTuple(MAX_ENCRYPTED_LOGS_PER_TX, LogHash.empty), + makeTuple(MAX_UNENCRYPTED_LOGS_PER_TX, LogHash.empty), Fr.zero(), Fr.zero(), makeTuple(MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX, CallRequest.empty), diff --git a/yarn-project/circuits.js/src/structs/kernel/private_kernel_reset_circuit_private_inputs.ts b/yarn-project/circuits.js/src/structs/kernel/private_kernel_reset_circuit_private_inputs.ts index 05da039bbe6..980af10d329 100644 --- a/yarn-project/circuits.js/src/structs/kernel/private_kernel_reset_circuit_private_inputs.ts +++ b/yarn-project/circuits.js/src/structs/kernel/private_kernel_reset_circuit_private_inputs.ts @@ -4,10 +4,12 @@ import { BufferReader, type Tuple, serializeToBuffer } from '@aztec/foundation/s import { MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, + MAX_NOTE_ENCRYPTED_LOGS_PER_TX, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, } from '../../constants.gen.js'; import { type GrumpkinPrivateKey } from '../../types/grumpkin_private_key.js'; import { countAccumulatedItems } from '../../utils/index.js'; +import { NoteLogHash } from '../log_hash.js'; import { ScopedNoteHash } from '../note_hash.js'; import { ScopedNullifier } from '../nullifier.js'; import { @@ -22,10 +24,11 @@ export class PrivateKernelResetOutputs { constructor( public noteHashes: Tuple, public nullifiers: Tuple, + public noteEncryptedLogHashes: Tuple, ) {} toBuffer() { - return serializeToBuffer(this.noteHashes, this.nullifiers); + return serializeToBuffer(this.noteHashes, this.nullifiers, this.noteEncryptedLogHashes); } static fromBuffer(buffer: Buffer | BufferReader) { @@ -33,6 +36,7 @@ export class PrivateKernelResetOutputs { return new PrivateKernelResetOutputs( reader.readArray(MAX_NEW_NOTE_HASHES_PER_TX, ScopedNoteHash), reader.readArray(MAX_NEW_NULLIFIERS_PER_TX, ScopedNullifier), + reader.readArray(MAX_NOTE_ENCRYPTED_LOGS_PER_TX, NoteLogHash), ); } } @@ -47,6 +51,10 @@ export class PrivateKernelResetHints { * Contains hints for the transient nullifiers to locate corresponding note hashes. 
*/ public transientNoteHashIndexesForNullifiers: Tuple, + /** + * Contains hints for the transient logs to locate corresponding note hashes. + */ + public transientNoteHashIndexesForLogs: Tuple, /** * Contains hints for the transient read requests to localize corresponding commitments. */ @@ -66,6 +74,7 @@ export class PrivateKernelResetHints { return serializeToBuffer( this.transientNullifierIndexesForNoteHashes, this.transientNoteHashIndexesForNullifiers, + this.transientNoteHashIndexesForLogs, this.noteHashReadRequestHints, this.nullifierReadRequestHints, this.masterNullifierSecretKeys, @@ -82,6 +91,7 @@ export class PrivateKernelResetHints { return new PrivateKernelResetHints( reader.readNumbers(MAX_NEW_NOTE_HASHES_PER_TX), reader.readNumbers(MAX_NEW_NULLIFIERS_PER_TX), + reader.readNumbers(MAX_NOTE_ENCRYPTED_LOGS_PER_TX), reader.readObject({ fromBuffer: noteHashReadRequestHintsFromBuffer }), reader.readObject({ fromBuffer: nullifierReadRequestHintsFromBuffer }), reader.readArray(MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, GrumpkinScalar), diff --git a/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_private_inputs.ts b/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_private_inputs.ts index a9d1be68988..f3015824a00 100644 --- a/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_private_inputs.ts +++ b/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_private_inputs.ts @@ -4,12 +4,13 @@ import { MAX_ENCRYPTED_LOGS_PER_TX, MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, + MAX_NOTE_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX, } from '../../constants.gen.js'; import { countAccumulatedItems } from '../../utils/index.js'; +import { LogHash, NoteLogHash } from '../log_hash.js'; import { ScopedNoteHash } from '../note_hash.js'; import { ScopedNullifier } from '../nullifier.js'; -import { SideEffect } from '../side_effects.js'; import { PrivateKernelData } from './private_kernel_data.js'; export class PrivateKernelTailHints { @@ -30,10 +31,18 @@ export class PrivateKernelTailHints { * The sorted new nullifiers indexes. */ public sortedNewNullifiersIndexes: Tuple, + /** + * The sorted encrypted note log hashes. + */ + public sortedNoteEncryptedLogHashes: Tuple, + /** + * The sorted encrypted note log hashes indexes. Maps original to sorted. + */ + public sortedNoteEncryptedLogHashesIndexes: Tuple, /** * The sorted encrypted log hashes. */ - public sortedEncryptedLogHashes: Tuple, + public sortedEncryptedLogHashes: Tuple, /** * The sorted encrypted log hashes indexes. Maps original to sorted. */ @@ -41,7 +50,7 @@ export class PrivateKernelTailHints { /** * The sorted unencrypted log hashes. */ - public sortedUnencryptedLogHashes: Tuple, + public sortedUnencryptedLogHashes: Tuple, /** * The sorted encrypted log hashes indexes. Maps original to sorted. 
*/ @@ -54,6 +63,8 @@ export class PrivateKernelTailHints { this.sortedNewNoteHashesIndexes, this.sortedNewNullifiers, this.sortedNewNullifiersIndexes, + this.sortedNoteEncryptedLogHashes, + this.sortedNoteEncryptedLogHashesIndexes, this.sortedEncryptedLogHashes, this.sortedEncryptedLogHashesIndexes, this.sortedUnencryptedLogHashes, @@ -73,9 +84,11 @@ export class PrivateKernelTailHints { reader.readNumbers(MAX_NEW_NOTE_HASHES_PER_TX), reader.readArray(MAX_NEW_NULLIFIERS_PER_TX, ScopedNullifier), reader.readNumbers(MAX_NEW_NULLIFIERS_PER_TX), - reader.readArray(MAX_ENCRYPTED_LOGS_PER_TX, SideEffect), + reader.readArray(MAX_NOTE_ENCRYPTED_LOGS_PER_TX, NoteLogHash), + reader.readNumbers(MAX_NOTE_ENCRYPTED_LOGS_PER_TX), + reader.readArray(MAX_ENCRYPTED_LOGS_PER_TX, LogHash), reader.readNumbers(MAX_ENCRYPTED_LOGS_PER_TX), - reader.readArray(MAX_UNENCRYPTED_LOGS_PER_TX, SideEffect), + reader.readArray(MAX_UNENCRYPTED_LOGS_PER_TX, LogHash), reader.readNumbers(MAX_UNENCRYPTED_LOGS_PER_TX), ); } diff --git a/yarn-project/circuits.js/src/structs/kernel/public_accumulated_data.ts b/yarn-project/circuits.js/src/structs/kernel/public_accumulated_data.ts index 792891fc9c7..3843fe50efc 100644 --- a/yarn-project/circuits.js/src/structs/kernel/public_accumulated_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/public_accumulated_data.ts @@ -10,16 +10,17 @@ import { MAX_NEW_L2_TO_L1_MSGS_PER_TX, MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, + MAX_NOTE_ENCRYPTED_LOGS_PER_TX, MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX, } from '../../constants.gen.js'; import { CallRequest } from '../call_request.js'; import { Gas } from '../gas.js'; +import { LogHash, NoteLogHash } from '../log_hash.js'; import { NoteHash } from '../note_hash.js'; import { Nullifier } from '../nullifier.js'; import { PublicDataUpdateRequest } from '../public_data_update_request.js'; -import { SideEffect } from '../side_effects.js'; export class PublicAccumulatedData { constructor( @@ -36,15 +37,20 @@ export class PublicAccumulatedData { */ public newL2ToL1Msgs: Tuple, /** - * Accumulated encrypted logs hash from all the previous kernel iterations. - * Note: Represented as a tuple of 2 fields in order to fit in all of the 256 bits of sha256 hash. + * Accumulated encrypted note logs hashes from all the previous kernel iterations. + * Note: Truncated to 31 bytes to fit in Fr. */ - public encryptedLogsHashes: Tuple, + public noteEncryptedLogsHashes: Tuple, /** - * Accumulated unencrypted logs hash from all the previous kernel iterations. - * Note: Represented as a tuple of 2 fields in order to fit in all of the 256 bits of sha256 hash. + * Accumulated encrypted logs hashes from all the previous kernel iterations. + * Note: Truncated to 31 bytes to fit in Fr. */ - public unencryptedLogsHashes: Tuple, + public encryptedLogsHashes: Tuple, + /** + * Accumulated unencrypted logs hashes from all the previous kernel iterations. + * Note: Truncated to 31 bytes to fit in Fr. 
+ */ + public unencryptedLogsHashes: Tuple, /** * Total accumulated length of the encrypted log preimages emitted in all the previous kernel iterations */ @@ -71,6 +77,7 @@ export class PublicAccumulatedData { this.newNoteHashes, this.newNullifiers, this.newL2ToL1Msgs, + this.noteEncryptedLogsHashes, this.encryptedLogsHashes, this.unencryptedLogsHashes, this.encryptedLogPreimagesLength, @@ -90,6 +97,7 @@ export class PublicAccumulatedData { this.newNoteHashes.every(x => x.isEmpty()) && this.newNullifiers.every(x => x.isEmpty()) && this.newL2ToL1Msgs.every(x => x.isZero()) && + this.noteEncryptedLogsHashes.every(x => x.isEmpty()) && this.encryptedLogsHashes.every(x => x.isEmpty()) && this.unencryptedLogsHashes.every(x => x.isEmpty()) && this.encryptedLogPreimagesLength.isZero() && @@ -115,6 +123,10 @@ export class PublicAccumulatedData { .filter(x => !x.isZero()) .map(h => inspect(h)) .join(', ')}], + noteEncryptedLogsHashes: [${this.noteEncryptedLogsHashes + .filter(x => !x.isEmpty()) + .map(h => inspect(h)) + .join(', ')}], encryptedLogsHashes: [${this.encryptedLogsHashes .filter(x => !x.isEmpty()) .map(h => inspect(h)) @@ -148,8 +160,9 @@ export class PublicAccumulatedData { reader.readArray(MAX_NEW_NOTE_HASHES_PER_TX, NoteHash), reader.readArray(MAX_NEW_NULLIFIERS_PER_TX, Nullifier), reader.readArray(MAX_NEW_L2_TO_L1_MSGS_PER_TX, Fr), - reader.readArray(MAX_ENCRYPTED_LOGS_PER_TX, SideEffect), - reader.readArray(MAX_UNENCRYPTED_LOGS_PER_TX, SideEffect), + reader.readArray(MAX_NOTE_ENCRYPTED_LOGS_PER_TX, NoteLogHash), + reader.readArray(MAX_ENCRYPTED_LOGS_PER_TX, LogHash), + reader.readArray(MAX_UNENCRYPTED_LOGS_PER_TX, LogHash), Fr.fromBuffer(reader), Fr.fromBuffer(reader), reader.readArray(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, PublicDataUpdateRequest), @@ -172,8 +185,9 @@ export class PublicAccumulatedData { makeTuple(MAX_NEW_NOTE_HASHES_PER_TX, NoteHash.empty), makeTuple(MAX_NEW_NULLIFIERS_PER_TX, Nullifier.empty), makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_TX, Fr.zero), - makeTuple(MAX_ENCRYPTED_LOGS_PER_TX, SideEffect.empty), - makeTuple(MAX_UNENCRYPTED_LOGS_PER_TX, SideEffect.empty), + makeTuple(MAX_NOTE_ENCRYPTED_LOGS_PER_TX, NoteLogHash.empty), + makeTuple(MAX_ENCRYPTED_LOGS_PER_TX, LogHash.empty), + makeTuple(MAX_UNENCRYPTED_LOGS_PER_TX, LogHash.empty), Fr.zero(), Fr.zero(), makeTuple(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, PublicDataUpdateRequest.empty), diff --git a/yarn-project/circuits.js/src/structs/log_hash.ts b/yarn-project/circuits.js/src/structs/log_hash.ts new file mode 100644 index 00000000000..7c1f0802217 --- /dev/null +++ b/yarn-project/circuits.js/src/structs/log_hash.ts @@ -0,0 +1,72 @@ +import { Fr } from '@aztec/foundation/fields'; +import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize'; + +import { type Ordered } from '../interfaces/index.js'; + +export class LogHash implements Ordered { + constructor(public value: Fr, public counter: number, public length: Fr) {} + + toFields(): Fr[] { + return [this.value, new Fr(this.counter), this.length]; + } + + static fromFields(fields: Fr[] | FieldReader) { + const reader = FieldReader.asReader(fields); + return new LogHash(reader.readField(), reader.readU32(), reader.readField()); + } + + isEmpty() { + return this.value.isZero() && this.length.isZero() && !this.counter; + } + + static empty() { + return new LogHash(Fr.zero(), 0, Fr.zero()); + } + + toBuffer(): Buffer { + return serializeToBuffer(this.value, this.counter, this.length); + } + + static fromBuffer(buffer: Buffer | 
BufferReader) { + const reader = BufferReader.asReader(buffer); + return new LogHash(Fr.fromBuffer(reader), reader.readNumber(), Fr.fromBuffer(reader)); + } + + toString(): string { + return `value=${this.value} counter=${this.counter} length=${this.length}`; + } +} + +export class NoteLogHash implements Ordered { + constructor(public value: Fr, public counter: number, public length: Fr, public noteHashCounter: number) {} + + toFields(): Fr[] { + return [this.value, new Fr(this.counter), this.length, new Fr(this.noteHashCounter)]; + } + + static fromFields(fields: Fr[] | FieldReader) { + const reader = FieldReader.asReader(fields); + return new NoteLogHash(reader.readField(), reader.readU32(), reader.readField(), reader.readU32()); + } + + isEmpty() { + return this.value.isZero() && this.length.isZero() && !this.counter && !this.noteHashCounter; + } + + static empty() { + return new NoteLogHash(Fr.zero(), 0, Fr.zero(), 0); + } + + toBuffer(): Buffer { + return serializeToBuffer(this.value, this.counter, this.length, this.noteHashCounter); + } + + static fromBuffer(buffer: Buffer | BufferReader) { + const reader = BufferReader.asReader(buffer); + return new NoteLogHash(Fr.fromBuffer(reader), reader.readNumber(), Fr.fromBuffer(reader), reader.readNumber()); + } + + toString(): string { + return `value=${this.value} counter=${this.counter} length=${this.length} noteHashCounter=${this.noteHashCounter}`; + } +} diff --git a/yarn-project/circuits.js/src/structs/private_circuit_public_inputs.ts b/yarn-project/circuits.js/src/structs/private_circuit_public_inputs.ts index 7c9da159b58..7eae5eedb78 100644 --- a/yarn-project/circuits.js/src/structs/private_circuit_public_inputs.ts +++ b/yarn-project/circuits.js/src/structs/private_circuit_public_inputs.ts @@ -17,6 +17,7 @@ import { MAX_NEW_L2_TO_L1_MSGS_PER_CALL, MAX_NEW_NOTE_HASHES_PER_CALL, MAX_NEW_NULLIFIERS_PER_CALL, + MAX_NOTE_ENCRYPTED_LOGS_PER_CALL, MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL, MAX_NULLIFIER_READ_REQUESTS_PER_CALL, @@ -29,12 +30,12 @@ import { Header } from '../structs/header.js'; import { isEmptyArray } from '../utils/index.js'; import { CallContext } from './call_context.js'; import { L2ToL1Message } from './l2_to_l1_message.js'; +import { LogHash, NoteLogHash } from './log_hash.js'; import { MaxBlockNumber } from './max_block_number.js'; import { NoteHash } from './note_hash.js'; import { Nullifier } from './nullifier.js'; import { NullifierKeyValidationRequest } from './nullifier_key_validation_request.js'; import { ReadRequest } from './read_request.js'; -import { SideEffect } from './side_effects.js'; import { TxContext } from './tx_context.js'; /** @@ -113,16 +114,21 @@ export class PrivateCircuitPublicInputs { * The end side effect counter for this call. */ public endSideEffectCounter: Fr, + /** + * Hash of the encrypted note logs emitted in this function call. + * Note: Truncated to 31 bytes to fit in Fr. + */ + public noteEncryptedLogsHashes: Tuple, /** * Hash of the encrypted logs emitted in this function call. * Note: Truncated to 31 bytes to fit in Fr. */ - public encryptedLogsHashes: Tuple, + public encryptedLogsHashes: Tuple, /** * Hash of the unencrypted logs emitted in this function call. * Note: Truncated to 31 bytes to fit in Fr. */ - public unencryptedLogsHashes: Tuple, + public unencryptedLogsHashes: Tuple, /** * Length of the encrypted log preimages emitted in this function call. 
* Note: Here so that the gas cost of this request can be measured by circuits, without actually needing to feed @@ -181,8 +187,9 @@ export class PrivateCircuitPublicInputs { reader.readArray(MAX_NEW_L2_TO_L1_MSGS_PER_CALL, L2ToL1Message), reader.readObject(Fr), reader.readObject(Fr), - reader.readArray(MAX_ENCRYPTED_LOGS_PER_CALL, SideEffect), - reader.readArray(MAX_UNENCRYPTED_LOGS_PER_CALL, SideEffect), + reader.readArray(MAX_NOTE_ENCRYPTED_LOGS_PER_CALL, NoteLogHash), + reader.readArray(MAX_ENCRYPTED_LOGS_PER_CALL, LogHash), + reader.readArray(MAX_UNENCRYPTED_LOGS_PER_CALL, LogHash), reader.readObject(Fr), reader.readObject(Fr), reader.readObject(Header), @@ -210,8 +217,9 @@ export class PrivateCircuitPublicInputs { reader.readArray(MAX_NEW_L2_TO_L1_MSGS_PER_CALL, L2ToL1Message), reader.readField(), reader.readField(), - reader.readArray(MAX_ENCRYPTED_LOGS_PER_CALL, SideEffect), - reader.readArray(MAX_UNENCRYPTED_LOGS_PER_CALL, SideEffect), + reader.readArray(MAX_NOTE_ENCRYPTED_LOGS_PER_CALL, NoteLogHash), + reader.readArray(MAX_ENCRYPTED_LOGS_PER_CALL, LogHash), + reader.readArray(MAX_UNENCRYPTED_LOGS_PER_CALL, LogHash), reader.readField(), reader.readField(), reader.readObject(Header), @@ -242,8 +250,9 @@ export class PrivateCircuitPublicInputs { makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_CALL, L2ToL1Message.empty), Fr.ZERO, Fr.ZERO, - makeTuple(MAX_ENCRYPTED_LOGS_PER_CALL, SideEffect.empty), - makeTuple(MAX_UNENCRYPTED_LOGS_PER_CALL, SideEffect.empty), + makeTuple(MAX_NOTE_ENCRYPTED_LOGS_PER_CALL, NoteLogHash.empty), + makeTuple(MAX_ENCRYPTED_LOGS_PER_CALL, LogHash.empty), + makeTuple(MAX_UNENCRYPTED_LOGS_PER_CALL, LogHash.empty), Fr.ZERO, Fr.ZERO, Header.empty(), @@ -269,6 +278,7 @@ export class PrivateCircuitPublicInputs { isZeroArray(this.publicCallStackHashes) && this.publicTeardownFunctionHash.isZero() && isEmptyArray(this.newL2ToL1Msgs) && + isEmptyArray(this.noteEncryptedLogsHashes) && isEmptyArray(this.encryptedLogsHashes) && isEmptyArray(this.unencryptedLogsHashes) && this.encryptedLogPreimagesLength.isZero() && @@ -302,6 +312,7 @@ export class PrivateCircuitPublicInputs { fields.newL2ToL1Msgs, fields.startSideEffectCounter, fields.endSideEffectCounter, + fields.noteEncryptedLogsHashes, fields.encryptedLogsHashes, fields.unencryptedLogsHashes, fields.encryptedLogPreimagesLength, diff --git a/yarn-project/circuits.js/src/structs/public_circuit_public_inputs.ts b/yarn-project/circuits.js/src/structs/public_circuit_public_inputs.ts index 77b4f05483c..0674dc53b56 100644 --- a/yarn-project/circuits.js/src/structs/public_circuit_public_inputs.ts +++ b/yarn-project/circuits.js/src/structs/public_circuit_public_inputs.ts @@ -33,11 +33,11 @@ import { Gas } from './gas.js'; import { GlobalVariables } from './global_variables.js'; import { Header } from './header.js'; import { L2ToL1Message } from './l2_to_l1_message.js'; +import { LogHash } from './log_hash.js'; import { NoteHash } from './note_hash.js'; import { Nullifier } from './nullifier.js'; import { ReadRequest } from './read_request.js'; import { RevertCode } from './revert_code.js'; -import { SideEffect } from './side_effects.js'; /** * Public inputs to a public circuit. @@ -106,7 +106,7 @@ export class PublicCircuitPublicInputs { * Hash of the unencrypted logs emitted in this function call. * Note: Truncated to 31 bytes to fit in Fr. */ - public unencryptedLogsHashes: Tuple, + public unencryptedLogsHashes: Tuple, /** * Length of the unencrypted log preimages emitted in this function call. 
*/ @@ -166,7 +166,7 @@ export class PublicCircuitPublicInputs { makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_CALL, L2ToL1Message.empty), Fr.ZERO, Fr.ZERO, - makeTuple(MAX_UNENCRYPTED_LOGS_PER_CALL, SideEffect.empty), + makeTuple(MAX_UNENCRYPTED_LOGS_PER_CALL, LogHash.empty), Fr.ZERO, Header.empty(), GlobalVariables.empty(), @@ -277,7 +277,7 @@ export class PublicCircuitPublicInputs { reader.readArray(MAX_NEW_L2_TO_L1_MSGS_PER_CALL, L2ToL1Message), reader.readObject(Fr), reader.readObject(Fr), - reader.readArray(MAX_UNENCRYPTED_LOGS_PER_CALL, SideEffect), + reader.readArray(MAX_UNENCRYPTED_LOGS_PER_CALL, LogHash), reader.readObject(Fr), reader.readObject(Header), reader.readObject(GlobalVariables), @@ -306,7 +306,7 @@ export class PublicCircuitPublicInputs { reader.readArray(MAX_NEW_L2_TO_L1_MSGS_PER_CALL, L2ToL1Message), reader.readField(), reader.readField(), - reader.readArray(MAX_UNENCRYPTED_LOGS_PER_CALL, SideEffect), + reader.readArray(MAX_UNENCRYPTED_LOGS_PER_CALL, LogHash), reader.readField(), Header.fromFields(reader), GlobalVariables.fromFields(reader), diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts index 7cfc28ff701..49ccb6738b4 100644 --- a/yarn-project/circuits.js/src/tests/factories.ts +++ b/yarn-project/circuits.js/src/tests/factories.ts @@ -37,6 +37,7 @@ import { GrumpkinScalar, L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, L2ToL1Message, + LogHash, MAX_ENCRYPTED_LOGS_PER_CALL, MAX_ENCRYPTED_LOGS_PER_TX, MAX_NEW_L2_TO_L1_MSGS_PER_CALL, @@ -45,6 +46,8 @@ import { MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_CALL, MAX_NEW_NULLIFIERS_PER_TX, + MAX_NOTE_ENCRYPTED_LOGS_PER_CALL, + MAX_NOTE_ENCRYPTED_LOGS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL, @@ -74,6 +77,7 @@ import { NUM_BASE_PARITY_PER_ROOT_PARITY, NUM_MSGS_PER_BASE_PARITY, NoteHash, + NoteLogHash, Nullifier, NullifierKeyValidationRequest, NullifierLeafPreimage, @@ -118,7 +122,6 @@ import { RootRollupPublicInputs, ScopedNullifierKeyValidationRequest, ScopedReadRequest, - SideEffect, StateDiffHints, StateReference, TxContext, @@ -147,8 +150,12 @@ import { ValidationRequests } from '../structs/validation_requests.js'; * @param seed - The seed to use for generating the object. * @returns A side effect object. 
*/ -export function makeNewSideEffect(seed: number): SideEffect { - return new SideEffect(fr(seed), fr(seed + 1)); +function makeLogHash(seed: number) { + return new LogHash(fr(seed), seed + 1, fr(seed + 2)); +} + +function makeNoteLogHash(seed: number) { + return new NoteLogHash(fr(seed + 3), seed + 1, fr(seed + 2), seed); } function makeNoteHash(seed: number) { @@ -298,10 +305,11 @@ export function makeCombinedAccumulatedData(seed = 1, full = false): CombinedAcc tupleGenerator(MAX_NEW_NOTE_HASHES_PER_TX, fr, seed + 0x120, Fr.zero), tupleGenerator(MAX_NEW_NULLIFIERS_PER_TX, fr, seed + 0x200, Fr.zero), tupleGenerator(MAX_NEW_L2_TO_L1_MSGS_PER_TX, fr, seed + 0x600, Fr.zero), - fr(seed + 0x700), // encrypted logs hash - fr(seed + 0x800), // unencrypted logs hash - fr(seed + 0x900), // encrypted_log_preimages_length - fr(seed + 0xa00), // unencrypted_log_preimages_length + fr(seed + 0x700), // note encrypted logs hash + fr(seed + 0x800), // encrypted logs hash + fr(seed + 0x900), // unencrypted logs hash + fr(seed + 0xa00), // encrypted_log_preimages_length + fr(seed + 0xb00), // unencrypted_log_preimages_length tupleGenerator( MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, makePublicDataUpdateRequest, @@ -328,10 +336,11 @@ export function makePublicAccumulatedData(seed = 1, full = false): PublicAccumul tupleGenerator(MAX_NEW_NOTE_HASHES_PER_TX, makeNoteHash, seed + 0x120, NoteHash.empty), tupleGenerator(MAX_NEW_NULLIFIERS_PER_TX, makeNullifier, seed + 0x200, Nullifier.empty), tupleGenerator(MAX_NEW_L2_TO_L1_MSGS_PER_TX, fr, seed + 0x600, Fr.zero), - tupleGenerator(MAX_ENCRYPTED_LOGS_PER_TX, makeNewSideEffect, seed + 0x700, SideEffect.empty), // encrypted logs hashes - tupleGenerator(MAX_UNENCRYPTED_LOGS_PER_TX, makeNewSideEffect, seed + 0x800, SideEffect.empty), // unencrypted logs hashes - fr(seed + 0x900), // encrypted_log_preimages_length - fr(seed + 0xa00), // unencrypted_log_preimages_length + tupleGenerator(MAX_NOTE_ENCRYPTED_LOGS_PER_TX, makeNoteLogHash, seed + 0x700, NoteLogHash.empty), // note encrypted logs hashes + tupleGenerator(MAX_ENCRYPTED_LOGS_PER_TX, makeLogHash, seed + 0x800, LogHash.empty), // encrypted logs hashes + tupleGenerator(MAX_UNENCRYPTED_LOGS_PER_TX, makeLogHash, seed + 0x900, LogHash.empty), // unencrypted logs hashes + fr(seed + 0xa00), // encrypted_log_preimages_length + fr(seed + 0xb00), // unencrypted_log_preimages_length tupleGenerator( MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, makePublicDataUpdateRequest, @@ -407,7 +416,7 @@ export function makePublicCircuitPublicInputs( tupleGenerator(MAX_NEW_L2_TO_L1_MSGS_PER_CALL, makeL2ToL1Message, seed + 0x900, L2ToL1Message.empty), fr(seed + 0xa00), fr(seed + 0xa01), - tupleGenerator(MAX_UNENCRYPTED_LOGS_PER_CALL, makeNewSideEffect, seed + 0x901, SideEffect.empty), + tupleGenerator(MAX_UNENCRYPTED_LOGS_PER_CALL, makeLogHash, seed + 0x901, LogHash.empty), fr(seed + 0x902), makeHeader(seed + 0xa00, undefined), makeGlobalVariables(seed + 0xa01), @@ -785,8 +794,9 @@ export function makePrivateCircuitPublicInputs(seed = 0): PrivateCircuitPublicIn newL2ToL1Msgs: makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_CALL, makeL2ToL1Message, seed + 0x800), startSideEffectCounter: fr(seed + 0x849), endSideEffectCounter: fr(seed + 0x850), - encryptedLogsHashes: makeTuple(MAX_ENCRYPTED_LOGS_PER_CALL, makeNewSideEffect, seed + 0x900), - unencryptedLogsHashes: makeTuple(MAX_UNENCRYPTED_LOGS_PER_CALL, makeNewSideEffect, seed + 0xa00), + noteEncryptedLogsHashes: makeTuple(MAX_NOTE_ENCRYPTED_LOGS_PER_CALL, makeNoteLogHash, seed + 0x875), + 
encryptedLogsHashes: makeTuple(MAX_ENCRYPTED_LOGS_PER_CALL, makeLogHash, seed + 0x900), + unencryptedLogsHashes: makeTuple(MAX_UNENCRYPTED_LOGS_PER_CALL, makeLogHash, seed + 0xa00), encryptedLogPreimagesLength: fr(seed + 0xb00), unencryptedLogPreimagesLength: fr(seed + 0xc00), historicalHeader: makeHeader(seed + 0xd00, undefined), diff --git a/yarn-project/circuits.js/src/types/public_keys.ts b/yarn-project/circuits.js/src/types/public_keys.ts index 4478705a11d..babb3c8ddbc 100644 --- a/yarn-project/circuits.js/src/types/public_keys.ts +++ b/yarn-project/circuits.js/src/types/public_keys.ts @@ -1,6 +1,6 @@ import { poseidon2Hash } from '@aztec/foundation/crypto'; import { type Fr, Point } from '@aztec/foundation/fields'; -import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; +import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { GeneratorIndex } from '../constants.gen.js'; import { type PublicKey } from './public_key.js'; @@ -107,4 +107,14 @@ export class PublicKeys { ...this.masterTaggingPublicKey.toFields(), ]; } + + static fromFields(fields: Fr[] | FieldReader): PublicKeys { + const reader = FieldReader.asReader(fields); + return new PublicKeys( + reader.readObject(Point), + reader.readObject(Point), + reader.readObject(Point), + reader.readObject(Point), + ); + } } diff --git a/yarn-project/end-to-end/Earthfile b/yarn-project/end-to-end/Earthfile index db3bc073183..571ad75dc41 100644 --- a/yarn-project/end-to-end/Earthfile +++ b/yarn-project/end-to-end/Earthfile @@ -198,9 +198,9 @@ bench-tx-size: DO +E2E_COMPOSE_TEST --test=benchmarks/bench_tx_size_fees.test.ts --debug="aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees" --enable_gas=1 --compose_file=./scripts/docker-compose-no-sandbox.yml DO ../../+UPLOAD_LOGS --PULL_REQUEST=$PULL_REQUEST --BRANCH=$BRANCH --COMMIT_HASH=$COMMIT_HASH -bench-proving: +bench-prover: ARG PULL_REQUEST ARG BRANCH ARG COMMIT_HASH - DO +E2E_COMPOSE_TEST --test=bench_proving --debug="aztec:benchmarks:*,aztec:prover*,aztec:bb*" --enable_gas=1 --compose_file=./scripts/docker-compose-no-sandbox.yml + DO +E2E_COMPOSE_TEST --test=bench_prover --debug="aztec:benchmarks:*,aztec:prover*,aztec:bb*,aztec:pxe*" --enable_gas=1 --compose_file=./scripts/docker-compose-no-sandbox.yml DO ../../+UPLOAD_LOGS --PULL_REQUEST=$PULL_REQUEST --BRANCH=$BRANCH --COMMIT_HASH=$COMMIT_HASH diff --git a/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts new file mode 100644 index 00000000000..5a4ba2bcdf3 --- /dev/null +++ b/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts @@ -0,0 +1,246 @@ +import { getSchnorrAccount, getSchnorrWallet } from '@aztec/accounts/schnorr'; +import { type AztecNodeService } from '@aztec/aztec-node'; +import { EthAddress, PrivateFeePaymentMethod, PublicFeePaymentMethod, TxStatus } from '@aztec/aztec.js'; +import { type AccountWallet } from '@aztec/aztec.js/wallet'; +import { CompleteAddress, Fq, Fr, GasSettings } from '@aztec/circuits.js'; +import { FPCContract, GasTokenContract, TestContract, TokenContract } from '@aztec/noir-contracts.js'; +import { getCanonicalGasTokenAddress } from '@aztec/protocol-contracts/gas-token'; +import { ProverPool } from '@aztec/prover-client/prover-pool'; +import { type PXEService, createPXEService } from '@aztec/pxe'; + +import { jest } from '@jest/globals'; + +import { getACVMConfig } from '../fixtures/get_acvm_config.js'; +import { 
getBBConfig } from '../fixtures/get_bb_config.js'; +import { type EndToEndContext, setup } from '../fixtures/utils.js'; + +jest.setTimeout(3_600_000); + +const txTimeoutSec = 3600; + +describe('benchmarks/proving', () => { + let ctx: EndToEndContext; + + let schnorrWalletSalt: Fr; + let schnorrWalletEncKey: Fr; + let schnorrWalletSigningKey: Fq; + let schnorrWalletAddress: CompleteAddress; + + let recipient: CompleteAddress; + + let initialGasContract: GasTokenContract; + let initialTestContract: TestContract; + let initialTokenContract: TokenContract; + let initialFpContract: FPCContract; + + let provingPxes: PXEService[]; + + let acvmCleanup: () => Promise; + let bbCleanup: () => Promise; + let proverPool: ProverPool; + + // setup the environment quickly using fake proofs + beforeAll(async () => { + ctx = await setup( + 1, + { + // do setup with fake proofs + realProofs: false, + proverAgents: 4, + proverAgentPollInterval: 10, + minTxsPerBlock: 1, + }, + {}, + true, // enable gas + ); + + schnorrWalletSalt = Fr.random(); + schnorrWalletEncKey = Fr.random(); + schnorrWalletSigningKey = Fq.random(); + const initialSchnorrWallet = await getSchnorrAccount( + ctx.pxe, + schnorrWalletEncKey, + schnorrWalletSigningKey, + schnorrWalletSalt, + ) + .deploy({ + skipClassRegistration: false, + skipPublicDeployment: false, + }) + .getWallet(); + schnorrWalletAddress = initialSchnorrWallet.getCompleteAddress(); + + initialTestContract = await TestContract.deploy(initialSchnorrWallet).send().deployed(); + initialTokenContract = await TokenContract.deploy( + initialSchnorrWallet, + initialSchnorrWallet.getAddress(), + 'test', + 't', + 18, + ) + .send() + .deployed(); + initialGasContract = await GasTokenContract.at( + getCanonicalGasTokenAddress(ctx.deployL1ContractsValues.l1ContractAddresses.gasPortalAddress), + initialSchnorrWallet, + ); + initialFpContract = await FPCContract.deploy( + initialSchnorrWallet, + initialTokenContract.address, + initialGasContract.address, + ) + .send() + .deployed(); + + await Promise.all([ + initialGasContract.methods.mint_public(initialFpContract.address, 1e12).send().wait(), + initialTokenContract.methods.mint_public(initialSchnorrWallet.getAddress(), 1e12).send().wait(), + initialTokenContract.methods.privately_mint_private_note(1e12).send().wait(), + ]); + + recipient = CompleteAddress.random(); + }); + + // remove the fake prover and setup the real one + beforeAll(async () => { + const [acvmConfig, bbConfig] = await Promise.all([getACVMConfig(ctx.logger), getBBConfig(ctx.logger)]); + if (!acvmConfig || !bbConfig) { + throw new Error('Missing ACVM or BB config'); + } + + acvmCleanup = acvmConfig.cleanup; + bbCleanup = bbConfig.cleanup; + + proverPool = ProverPool.nativePool( + { + ...acvmConfig, + ...bbConfig, + }, + 2, + 10, + ); + + ctx.logger.info('Stopping fake provers'); + await ctx.aztecNode.setConfig({ + // stop the fake provers + proverAgents: 0, + // 4-tx blocks so that we have at least one merge level + minTxsPerBlock: 4, + }); + + ctx.logger.info('Starting real provers'); + await proverPool.start((ctx.aztecNode as AztecNodeService).getProver().getProvingJobSource()); + + ctx.logger.info('Starting PXEs configured with real proofs'); + provingPxes = []; + for (let i = 0; i < 4; i++) { + const pxe = await createPXEService( + ctx.aztecNode, + { + proverEnabled: true, + bbBinaryPath: bbConfig.bbBinaryPath, + bbWorkingDirectory: bbConfig.bbWorkingDirectory, + l2BlockPollingIntervalMS: 1000, + l2StartingBlock: 1, + }, + `proving-pxe-${i}`, + ); + + await 
getSchnorrAccount(pxe, schnorrWalletEncKey, schnorrWalletSigningKey, schnorrWalletSalt).register(); + await pxe.registerContract(initialTokenContract); + await pxe.registerContract(initialTestContract); + await pxe.registerContract(initialFpContract); + await pxe.registerContract(initialGasContract); + + await pxe.registerRecipient(recipient); + + provingPxes.push(pxe); + } + }); + + afterAll(async () => { + for (const pxe of provingPxes) { + await pxe.stop(); + } + await proverPool.stop(); + await ctx.teardown(); + await acvmCleanup(); + await bbCleanup(); + }); + + it('builds a full block', async () => { + ctx.logger.info('+----------------------+'); + ctx.logger.info('| |'); + ctx.logger.info('| STARTING BENCHMARK |'); + ctx.logger.info('| |'); + ctx.logger.info('+----------------------+'); + + const fnCalls = [ + (await getTestContractOnPXE(0)).methods.emit_nullifier(42), + (await getTestContractOnPXE(1)).methods.emit_unencrypted(43), + (await getTestContractOnPXE(2)).methods.create_l2_to_l1_message_public(45, 46, EthAddress.random()), + (await getTokenContract(3)).methods.transfer(schnorrWalletAddress.address, recipient.address, 1000, 0), + ]; + + const feeFnCall1 = { + gasSettings: GasSettings.default(), + paymentMethod: new PublicFeePaymentMethod( + initialTokenContract.address, + initialFpContract.address, + await getWalletOnPxe(1), + ), + }; + + const feeFnCall3 = { + gasSettings: GasSettings.default(), + paymentMethod: new PrivateFeePaymentMethod( + initialTokenContract.address, + initialFpContract.address, + await getWalletOnPxe(3), + ), + }; + + ctx.logger.info('Proving first two transactions'); + await Promise.all([ + fnCalls[0].prove(), + fnCalls[1].prove({ + fee: feeFnCall1, + }), + ]); + + ctx.logger.info('Proving the next transactions'); + await Promise.all([ + fnCalls[2].prove(), + fnCalls[3].prove({ + fee: feeFnCall3, + }), + ]); + + ctx.logger.info('Finished proving all transactions'); + + ctx.logger.info('Sending transactions'); + const txs = [ + fnCalls[0].send(), + fnCalls[1].send({ fee: feeFnCall1 }), + fnCalls[2].send(), + fnCalls[3].send({ fee: feeFnCall3 }), + ]; + + const receipts = await Promise.all(txs.map(tx => tx.wait({ timeout: txTimeoutSec }))); + expect(receipts.every(r => r.status === TxStatus.MINED)).toBe(true); + }); + + function getWalletOnPxe(idx: number): Promise { + return getSchnorrWallet(provingPxes[idx], schnorrWalletAddress.address, schnorrWalletSigningKey); + } + + async function getTestContractOnPXE(idx: number): Promise { + const wallet = await getWalletOnPxe(idx); + return TestContract.at(initialTestContract.address, wallet); + } + + async function getTokenContract(idx: number): Promise { + const wallet = await getWalletOnPxe(idx); + return TokenContract.at(initialTokenContract.address, wallet); + } +}); diff --git a/yarn-project/end-to-end/src/benchmarks/bench_proving.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_proving.test.ts deleted file mode 100644 index 52f1d2f8826..00000000000 --- a/yarn-project/end-to-end/src/benchmarks/bench_proving.test.ts +++ /dev/null @@ -1,124 +0,0 @@ -import { type AztecNodeService } from '@aztec/aztec-node'; -import { type AccountWallet, EthAddress, PublicFeePaymentMethod, TxStatus } from '@aztec/aztec.js'; -import { GasSettings } from '@aztec/circuits.js'; -import { FPCContract, GasTokenContract, TestContract, TokenContract } from '@aztec/noir-contracts.js'; -import { getCanonicalGasTokenAddress } from '@aztec/protocol-contracts/gas-token'; -import { ProverPool } from 
'@aztec/prover-client/prover-pool'; - -import { jest } from '@jest/globals'; - -import { getACVMConfig } from '../fixtures/get_acvm_config.js'; -import { getBBConfig } from '../fixtures/get_bb_config.js'; -import { type EndToEndContext, publicDeployAccounts, setup } from '../fixtures/utils.js'; - -jest.setTimeout(600_000); - -const txTimeoutSec = 600; - -describe('benchmarks/proving', () => { - let ctx: EndToEndContext; - let wallet: AccountWallet; - let testContract: TestContract; - let tokenContract: TokenContract; - let fpContract: FPCContract; - let acvmCleanup: () => Promise; - let bbCleanup: () => Promise; - let proverPool: ProverPool; - - // setup the environment quickly using fake proofs - beforeAll(async () => { - ctx = await setup( - 1, - { - // do setup with fake proofs - realProofs: false, - proverAgents: 4, - proverAgentPollInterval: 10, - minTxsPerBlock: 1, - }, - {}, - true, // enable gas - ); - - wallet = ctx.wallet; - - await publicDeployAccounts(wallet, ctx.wallets); - - testContract = await TestContract.deploy(wallet).send().deployed(); - tokenContract = await TokenContract.deploy(wallet, wallet.getAddress(), 'test', 't', 18).send().deployed(); - const gas = await GasTokenContract.at( - getCanonicalGasTokenAddress(ctx.deployL1ContractsValues.l1ContractAddresses.gasPortalAddress), - wallet, - ); - fpContract = await FPCContract.deploy(wallet, tokenContract.address, gas.address).send().deployed(); - - await Promise.all([ - gas.methods.mint_public(fpContract.address, 1e12).send().wait(), - tokenContract.methods.mint_public(wallet.getAddress(), 1e12).send().wait(), - ]); - }); - - // remove the fake prover and setup the real one - beforeAll(async () => { - const [acvmConfig, bbConfig] = await Promise.all([getACVMConfig(ctx.logger), getBBConfig(ctx.logger)]); - if (!acvmConfig || !bbConfig) { - throw new Error('Missing ACVM or BB config'); - } - - acvmCleanup = acvmConfig.cleanup; - bbCleanup = bbConfig.cleanup; - - proverPool = ProverPool.nativePool( - { - ...acvmConfig, - ...bbConfig, - }, - 4, - 10, - ); - - ctx.logger.info('Stopping fake provers'); - await ctx.aztecNode.setConfig({ - // stop the fake provers - proverAgents: 0, - // 4-tx blocks so that we have at least one merge level - minTxsPerBlock: 4, - }); - - ctx.logger.info('Starting real provers'); - await proverPool.start((ctx.aztecNode as AztecNodeService).getProver().getProvingJobSource()); - }); - - afterAll(async () => { - await proverPool.stop(); - await ctx.teardown(); - await acvmCleanup(); - await bbCleanup(); - }); - - it('builds a full block', async () => { - const txs = [ - // fully private tx - testContract.methods.emit_nullifier(42).send(), - // tx with setup, app, teardown - testContract.methods.emit_unencrypted(43).send({ - fee: { - gasSettings: GasSettings.default(), - paymentMethod: new PublicFeePaymentMethod(tokenContract.address, fpContract.address, wallet), - }, - }), - // tx with messages - testContract.methods.create_l2_to_l1_message_public(45, 46, EthAddress.random()).send(), - // tx with private and public exec - testContract.methods.set_tx_max_block_number(100, true).send({ - fee: { - gasSettings: GasSettings.default(), - paymentMethod: new PublicFeePaymentMethod(tokenContract.address, fpContract.address, wallet), - }, - }), - ]; - - const receipts = await Promise.all(txs.map(tx => tx.wait({ timeout: txTimeoutSec }))); - expect(receipts.every(r => r.status === TxStatus.MINED)).toBe(true); - }, 1_200_000); -}); diff --git a/yarn-project/end-to-end/src/benchmarks/bench_tx_size_fees.test.ts 
b/yarn-project/end-to-end/src/benchmarks/bench_tx_size_fees.test.ts index 26e03ead1e4..a73f08a32d2 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_tx_size_fees.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_tx_size_fees.test.ts @@ -67,28 +67,28 @@ describe('benchmarks/tx_size_fees', () => { 'native fee', () => NativeFeePaymentMethod.create(aliceWallet), // DA: - // non-rev: 1 nullifiers, overhead; rev: 2 note hashes, 1 nullifier, 624 B enc logs, 8 B unenc logs, teardown + // non-rev: 1 nullifiers, overhead; rev: 2 note hashes, 1 nullifier, 616 B enc logs, 0 B unenc logs, teardown // L2: // non-rev: 0; rev: 0 - 200012672n, + 200012416n, ], [ 'public fee', () => Promise.resolve(new PublicFeePaymentMethod(token.address, fpc.address, aliceWallet)), // DA: - // non-rev: 1 nullifiers, overhead; rev: 2 note hashes, 1 nullifier, 628 B enc logs, 12 B unenc logs, teardown + // non-rev: 1 nullifiers, overhead; rev: 2 note hashes, 1 nullifier, 616 B enc logs, 0 B unenc logs, teardown // L2: // non-rev: 0; rev: 0 - 200012800n, + 200012416n, ], [ 'private fee', () => Promise.resolve(new PrivateFeePaymentMethod(token.address, fpc.address, aliceWallet)), // DA: - // non-rev: 3 nullifiers, overhead; rev: 2 note hashes, 944 B enc logs, 20 B unenc logs, teardown + // non-rev: 3 nullifiers, overhead; rev: 2 note hashes, 616 B enc logs, 0 B unenc logs, teardown // L2: // non-rev: 0; rev: 0 - 200018496n, + 200012928n, ], ] as const)( 'sends a tx with a fee with %s payment method', diff --git a/yarn-project/end-to-end/src/e2e_2_pxes.test.ts b/yarn-project/end-to-end/src/e2e_2_pxes.test.ts index fcd544eecce..0e61140a4c8 100644 --- a/yarn-project/end-to-end/src/e2e_2_pxes.test.ts +++ b/yarn-project/end-to-end/src/e2e_2_pxes.test.ts @@ -16,7 +16,7 @@ import { ChildContract, TokenContract } from '@aztec/noir-contracts.js'; import { jest } from '@jest/globals'; -import { expectsNumOfEncryptedLogsInTheLastBlockToBe, setup, setupPXEService } from './fixtures/utils.js'; +import { expectsNumOfNoteEncryptedLogsInTheLastBlockToBe, setup, setupPXEService } from './fixtures/utils.js'; const TIMEOUT = 120_000; @@ -132,7 +132,7 @@ describe('e2e_2_pxes', () => { // Check initial balances and logs are as expected await expectTokenBalance(walletA, tokenAddress, walletA.getAddress(), initialBalance); await expectTokenBalance(walletB, tokenAddress, walletB.getAddress(), 0n); - await expectsNumOfEncryptedLogsInTheLastBlockToBe(aztecNode, 1); + await expectsNumOfNoteEncryptedLogsInTheLastBlockToBe(aztecNode, 1); // Transfer funds from A to B via PXE A const contractWithWalletA = await TokenContract.at(tokenAddress, walletA); @@ -144,7 +144,7 @@ describe('e2e_2_pxes', () => { // Check balances and logs are as expected await expectTokenBalance(walletA, tokenAddress, walletA.getAddress(), initialBalance - transferAmount1); await expectTokenBalance(walletB, tokenAddress, walletB.getAddress(), transferAmount1); - await expectsNumOfEncryptedLogsInTheLastBlockToBe(aztecNode, 2); + await expectsNumOfNoteEncryptedLogsInTheLastBlockToBe(aztecNode, 2); // Transfer funds from B to A via PXE B const contractWithWalletB = await TokenContract.at(tokenAddress, walletB); @@ -161,7 +161,7 @@ describe('e2e_2_pxes', () => { initialBalance - transferAmount1 + transferAmount2, ); await expectTokenBalance(walletB, tokenAddress, walletB.getAddress(), transferAmount1 - transferAmount2); - await expectsNumOfEncryptedLogsInTheLastBlockToBe(aztecNode, 2); + await expectsNumOfNoteEncryptedLogsInTheLastBlockToBe(aztecNode, 2); }); const 
deployChildContractViaServerA = async () => { @@ -276,7 +276,7 @@ describe('e2e_2_pxes', () => { await expectTokenBalance(walletA, tokenAddress, walletA.getAddress(), initialBalance); // don't check userB yet - await expectsNumOfEncryptedLogsInTheLastBlockToBe(aztecNode, 1); + await expectsNumOfNoteEncryptedLogsInTheLastBlockToBe(aztecNode, 1); // Transfer funds from A to B via PXE A const contractWithWalletA = await TokenContract.at(tokenAddress, walletA); diff --git a/yarn-project/end-to-end/src/e2e_block_building.test.ts b/yarn-project/end-to-end/src/e2e_block_building.test.ts index 0688b129d9d..cbafb01600a 100644 --- a/yarn-project/end-to-end/src/e2e_block_building.test.ts +++ b/yarn-project/end-to-end/src/e2e_block_building.test.ts @@ -18,6 +18,7 @@ import { TokenContract } from '@aztec/noir-contracts.js/Token'; import 'jest-extended'; import { TaggedNote } from '../../circuit-types/src/logs/l1_note_payload/tagged_note.js'; +import { DUPLICATE_NULLIFIER_ERROR } from './fixtures/fixtures.js'; import { setup } from './fixtures/utils.js'; describe('e2e_block_building', () => { @@ -114,9 +115,6 @@ describe('e2e_block_building', () => { describe('double-spends', () => { let contract: TestContract; let teardown: () => Promise; - // TODO(https://github.com/AztecProtocol/aztec-packages/issues/5818): clean up - // Under current public, we expect 'dropped', under the AVM, we expect 'reverted'. - const DUPLICATE_NULLIFIER_ERROR = /dropped|reverted/; beforeAll(async () => { ({ teardown, pxe, logger, wallet: owner } = await setup(1)); @@ -273,7 +271,7 @@ describe('e2e_block_building', () => { // compare logs expect(rct.status).toEqual('mined'); - const decryptedLogs = tx.encryptedLogs + const decryptedLogs = tx.noteEncryptedLogs .unrollLogs() .map(l => TaggedNote.fromEncryptedBuffer(l.data, keys.masterIncomingViewingSecretKey)); const notevalues = decryptedLogs.map(l => l?.notePayload.note.items[0]); diff --git a/yarn-project/end-to-end/src/e2e_cross_chain_messaging.test.ts b/yarn-project/end-to-end/src/e2e_cross_chain_messaging.test.ts index ce136e9113c..4ba3f4990f0 100644 --- a/yarn-project/end-to-end/src/e2e_cross_chain_messaging.test.ts +++ b/yarn-project/end-to-end/src/e2e_cross_chain_messaging.test.ts @@ -15,6 +15,7 @@ import { type TokenBridgeContract, type TokenContract } from '@aztec/noir-contra import { toFunctionSelector } from 'viem/utils'; +import { NO_L1_TO_L2_MSG_ERROR } from './fixtures/fixtures.js'; import { setup } from './fixtures/utils.js'; import { CrossChainTestHarness } from './shared/cross_chain_test_harness.js'; @@ -233,18 +234,6 @@ describe('e2e_cross_chain_messaging', () => { // Wait for the message to be available for consumption await crossChainTestHarness.makeMessageConsumable(msgHash); - const content = sha256ToField([ - Buffer.from(toFunctionSelector('mint_public(bytes32,uint256)').substring(2), 'hex'), - ownerAddress, - new Fr(bridgeAmount), - ]); - const wrongMessage = new L1ToL2Message( - new L1Actor(crossChainTestHarness.tokenPortalAddress, crossChainTestHarness.publicClient.chain.id), - new L2Actor(l2Bridge.address, 1), - content, - secretHashForL2MessageConsumption, - ); - // get message leaf index, needed for claiming in public const maybeIndexAndPath = await aztecNode.getL1ToL2MessageMembershipWitness('latest', msgHash, 0n); expect(maybeIndexAndPath).toBeDefined(); @@ -256,6 +245,6 @@ describe('e2e_cross_chain_messaging', () => { .withWallet(user2Wallet) .methods.claim_public(ownerAddress, bridgeAmount, secretForL2MessageConsumption, messageLeafIndex) 
.prove(), - ).rejects.toThrow(`No non-nullified L1 to L2 message found for message hash ${wrongMessage.hash().toString()}`); + ).rejects.toThrow(NO_L1_TO_L2_MSG_ERROR); }); }); diff --git a/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts b/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts index 97a91be1e12..2dedcbade81 100644 --- a/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts +++ b/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts @@ -188,7 +188,8 @@ describe('e2e_crowdfunding_and_claim', () => { nonce: noteNonces[0], }, value: extendedNote.note.items[0], - owner: extendedNote.note.items[1], + // eslint-disable-next-line camelcase + npk_m_hash: extendedNote.note.items[1], randomness: extendedNote.note.items[2], }; }; diff --git a/yarn-project/end-to-end/src/e2e_delegate_calls/delegate.test.ts b/yarn-project/end-to-end/src/e2e_delegate_calls/delegate.test.ts index 9f6890efb00..60117019d07 100644 --- a/yarn-project/end-to-end/src/e2e_delegate_calls/delegate.test.ts +++ b/yarn-project/end-to-end/src/e2e_delegate_calls/delegate.test.ts @@ -24,14 +24,13 @@ describe('e2e_delegate_calls', () => { .wait(); const delegatorValue = await delegatorContract.methods - .view_private_value(sentValue, wallet.getCompleteAddress().address) + .get_private_value(sentValue, wallet.getCompleteAddress().address) .simulate(); - const delegatedOnValue = await delegatedOnContract.methods - .view_private_value(sentValue, wallet.getCompleteAddress().address) - .simulate(); + await expect( + delegatedOnContract.methods.get_private_value(sentValue, wallet.getCompleteAddress().address).simulate(), + ).rejects.toThrow(`Assertion failed: Cannot return zero notes 'num_notes != 0'`); - expect(delegatedOnValue).toEqual(0n); expect(delegatorValue).toEqual(sentValue); }); diff --git a/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts index aa92b4d2b56..3b55d703fd5 100644 --- a/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts +++ b/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts @@ -24,6 +24,7 @@ import { writeTestData } from '@aztec/foundation/testing'; import { StatefulTestContract } from '@aztec/noir-contracts.js'; import { TestContract } from '@aztec/noir-contracts.js/Test'; +import { DUPLICATE_NULLIFIER_ERROR } from '../fixtures/fixtures.js'; import { DeployTest, type StatefulContractCtorArgs } from './deploy_test.js'; describe('e2e_deploy_contract contract class registration', () => { @@ -189,7 +190,8 @@ describe('e2e_deploy_contract contract class registration', () => { .constructor(...initArgs) .send({ skipPublicSimulation: true }) .wait(), - ).rejects.toThrow(/dropped/i); + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/5818): Make these a fixed error after transition. 
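// [Illustrative sketch, not part of the diff] Why the re-registration above is
// expected to fail with DUPLICATE_NULLIFIER_ERROR: registering/deploying emits a
// deterministic nullifier, and a nullifier set only ever accepts a value once.
// Toy model of that rule (an assumed simplification, not the real world-state code):
class NullifierSetSketch {
  private seen = new Set<string>();

  insert(nullifier: string): void {
    if (this.seen.has(nullifier)) {
      // Surfaces as "dropped", "reverted" or "duplicate nullifier" depending on the VM.
      throw new Error(`duplicate nullifier ${nullifier}`);
    }
    this.seen.add(nullifier);
  }
}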
+ ).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); }); @@ -225,7 +227,7 @@ describe('e2e_deploy_contract contract class registration', () => { .public_constructor(...initArgs) .send({ skipPublicSimulation: true }) .wait(), - ).rejects.toThrow(/dropped/i); + ).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); }); }); diff --git a/yarn-project/end-to-end/src/e2e_encryption.test.ts b/yarn-project/end-to-end/src/e2e_encryption.test.ts index 88abfabd694..a87653fdc89 100644 --- a/yarn-project/end-to-end/src/e2e_encryption.test.ts +++ b/yarn-project/end-to-end/src/e2e_encryption.test.ts @@ -1,4 +1,13 @@ -import { EncryptedLogHeader, EncryptedLogIncomingBody, Fr, GrumpkinScalar, Note, type Wallet } from '@aztec/aztec.js'; +import { + AztecAddress, + EncryptedLogHeader, + EncryptedLogIncomingBody, + EncryptedLogOutgoingBody, + Fr, + GrumpkinScalar, + Note, + type Wallet, +} from '@aztec/aztec.js'; import { Aes128, Grumpkin } from '@aztec/circuits.js/barretenberg'; import { TestContract } from '@aztec/noir-contracts.js'; @@ -99,4 +108,27 @@ describe('e2e_encryption', () => { expect(recreated.toBuffer()).toEqual(body.toBuffer()); }); + + it('encrypts log outgoing body', async () => { + const ephSk = GrumpkinScalar.random(); + const recipientIvskApp = GrumpkinScalar.random(); + const senderOvskApp = GrumpkinScalar.random(); + + const ephPk = grumpkin.mul(Grumpkin.generator, ephSk); + const recipientIvpkApp = grumpkin.mul(Grumpkin.generator, recipientIvskApp); + + const recipientAddress = AztecAddress.fromBigInt(BigInt('0xdeadbeef')); + + const body = new EncryptedLogOutgoingBody(ephSk, recipientAddress, recipientIvpkApp); + + const encrypted = await contract.methods + .compute_outgoing_log_body_ciphertext(ephSk, recipientAddress, recipientIvpkApp, senderOvskApp) + .simulate(); + + expect(Buffer.from(encrypted.map((x: bigint) => Number(x)))).toEqual(body.computeCiphertext(senderOvskApp, ephPk)); + + const recreated = EncryptedLogOutgoingBody.fromCiphertext(encrypted, senderOvskApp, ephPk); + + expect(recreated.toBuffer()).toEqual(body.toBuffer()); + }); }); diff --git a/yarn-project/end-to-end/src/e2e_key_registry.test.ts b/yarn-project/end-to-end/src/e2e_key_registry.test.ts index 2d347a553fd..8f482b6c075 100644 --- a/yarn-project/end-to-end/src/e2e_key_registry.test.ts +++ b/yarn-project/end-to-end/src/e2e_key_registry.test.ts @@ -56,12 +56,16 @@ describe('Key Registry', () => { describe('failure cases', () => { it('throws when address preimage check fails', async () => { - const publicKeysBuf = account.publicKeys.toBuffer(); - // We randomly invalidate some of the keys by overwriting random byte - const byteIndex = Math.floor(Math.random() * publicKeysBuf.length); - publicKeysBuf[byteIndex] = (publicKeysBuf[byteIndex] + 2) % 256; + // First we get invalid keys by replacing any of the 8 fields of public keys with a random value + let invalidPublicKeys: PublicKeys; + { + // We call toBuffer and fromBuffer first to ensure that we get a deep copy + const publicKeysFields = PublicKeys.fromBuffer(account.publicKeys.toBuffer()).toFields(); + const randomIndex = Math.floor(Math.random() * publicKeysFields.length); + publicKeysFields[randomIndex] = Fr.random(); - const publicKeys = PublicKeys.fromBuffer(publicKeysBuf); + invalidPublicKeys = PublicKeys.fromFields(publicKeysFields); + } await expect( keyRegistry @@ -69,8 +73,8 @@ describe('Key Registry', () => { .methods.register( account, account.partialAddress, - // TODO(#6337): Directly dump account.publicKeys here - publicKeys.toNoirStruct(), + // 
TODO(#6337): Make calling `toNoirStruct()` unnecessary + invalidPublicKeys.toNoirStruct(), ) .send() .wait(), @@ -120,7 +124,7 @@ describe('Key Registry', () => { .methods.register( account, account.partialAddress, - // TODO(#6337): Directly dump account.publicKeys here + // TODO(#6337): Make calling `toNoirStruct()` unnecessary account.publicKeys.toNoirStruct(), ) .send() @@ -159,11 +163,13 @@ describe('Key Registry', () => { const secondNewMasterNullifierPublicKey = Point.random(); it('rotates npk_m', async () => { + // docs:start:key-rotation await keyRegistry .withWallet(wallets[0]) .methods.rotate_npk_m(wallets[0].getAddress(), firstNewMasterNullifierPublicKey, Fr.ZERO) .send() .wait(); + // docs:end:key-rotation // We check if our rotated nullifier key is equal to the key obtained from the getter by reading our registry // contract from the test contract. We expect this to fail because the change has not been applied yet diff --git a/yarn-project/end-to-end/src/e2e_key_rotation.test.ts b/yarn-project/end-to-end/src/e2e_key_rotation.test.ts new file mode 100644 index 00000000000..06ec9e71473 --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_key_rotation.test.ts @@ -0,0 +1,233 @@ +import { createAccounts } from '@aztec/accounts/testing'; +import { + type AztecAddress, + type AztecNode, + type DebugLogger, + ExtendedNote, + Fq, + Fr, + Note, + type PXE, + type TxHash, + type Wallet, + computeSecretHash, + retryUntil, +} from '@aztec/aztec.js'; +import { type PublicKey, derivePublicKeyFromSecretKey } from '@aztec/circuits.js'; +import { KeyRegistryContract, TestContract, TokenContract } from '@aztec/noir-contracts.js'; +import { getCanonicalKeyRegistryAddress } from '@aztec/protocol-contracts/key-registry'; + +import { jest } from '@jest/globals'; + +import { expectsNumOfNoteEncryptedLogsInTheLastBlockToBe, setup, setupPXEService } from './fixtures/utils.js'; + +const TIMEOUT = 120_000; + +const SHARED_MUTABLE_DELAY = 5; + +describe('e2e_key_rotation', () => { + jest.setTimeout(TIMEOUT); + + let aztecNode: AztecNode; + let pxeA: PXE; + let pxeB: PXE; + let walletA: Wallet; + let walletB: Wallet; + let logger: DebugLogger; + let teardownA: () => Promise; + let teardownB: () => Promise; + + let keyRegistryWithB: KeyRegistryContract; + let testContract: TestContract; + let contractWithWalletA: TokenContract; + let contractWithWalletB: TokenContract; + + let tokenAddress: AztecAddress; + + const initialBalance = 987n; + + beforeAll(async () => { + ({ + aztecNode, + pxe: pxeA, + wallets: [walletA], + logger, + teardown: teardownA, + } = await setup(1)); + + ({ pxe: pxeB, teardown: teardownB } = await setupPXEService(aztecNode, {}, undefined, true)); + + [walletB] = await createAccounts(pxeB, 1); + keyRegistryWithB = await KeyRegistryContract.at(getCanonicalKeyRegistryAddress(), walletB); + + // We deploy test and token contracts + testContract = await TestContract.deploy(walletA).send().deployed(); + const tokenInstance = await deployTokenContract(initialBalance, walletA.getAddress(), pxeA); + tokenAddress = tokenInstance.address; + + // Add account B to wallet A + await pxeA.registerRecipient(walletB.getCompleteAddress()); + // Add account A to wallet B + await pxeB.registerRecipient(walletA.getCompleteAddress()); + + // Add token to PXE B (PXE A already has it because it was deployed through it) + await pxeB.registerContract({ + artifact: TokenContract.artifact, + instance: tokenInstance, + }); + + contractWithWalletA = await TokenContract.at(tokenAddress, walletA); + contractWithWalletB 
= await TokenContract.at(tokenAddress, walletB); + }); + + afterEach(async () => { + await teardownB(); + await teardownA(); + }); + + const awaitUserSynchronized = async (wallet: Wallet, owner: AztecAddress) => { + const isUserSynchronized = async () => { + return await wallet.isAccountStateSynchronized(owner); + }; + await retryUntil(isUserSynchronized, `synch of user ${owner.toString()}`, 10); + }; + + const crossDelay = async () => { + for (let i = 0; i < SHARED_MUTABLE_DELAY; i++) { + // We send arbitrary tx to mine a block + await testContract.methods.emit_unencrypted(0).send().wait(); + } + }; + + const expectTokenBalance = async ( + wallet: Wallet, + tokenAddress: AztecAddress, + owner: AztecAddress, + expectedBalance: bigint, + checkIfSynchronized = true, + ) => { + if (checkIfSynchronized) { + // First wait until the corresponding PXE has synchronized the account + await awaitUserSynchronized(wallet, owner); + } + + // Then check the balance + const contractWithWallet = await TokenContract.at(tokenAddress, wallet); + const balance = await contractWithWallet.methods.balance_of_private(owner).simulate({ from: owner }); + logger.info(`Account ${owner} balance: ${balance}`); + expect(balance).toBe(expectedBalance); + }; + + const deployTokenContract = async (initialAdminBalance: bigint, admin: AztecAddress, pxe: PXE) => { + logger.info(`Deploying Token contract...`); + const contract = await TokenContract.deploy(walletA, admin, 'TokenName', 'TokenSymbol', 18).send().deployed(); + + if (initialAdminBalance > 0n) { + await mintTokens(contract, admin, initialAdminBalance, pxe); + } + + logger.info('L2 contract deployed'); + + return contract.instance; + }; + + const mintTokens = async (contract: TokenContract, recipient: AztecAddress, balance: bigint, pxe: PXE) => { + const secret = Fr.random(); + const secretHash = computeSecretHash(secret); + + const receipt = await contract.methods.mint_private(balance, secretHash).send().wait(); + + const note = new Note([new Fr(balance), secretHash]); + const extendedNote = new ExtendedNote( + note, + recipient, + contract.address, + TokenContract.storage.pending_shields.slot, + TokenContract.notes.TransparentNote.id, + receipt.txHash, + ); + await pxe.addNote(extendedNote); + + await contract.methods.redeem_shield(recipient, balance, secret).send().wait(); + }; + + it(`Rotates keys and uses them`, async () => { + // 1. We check that setup set initial balances as expected + await expectTokenBalance(walletA, tokenAddress, walletA.getAddress(), initialBalance); + await expectTokenBalance(walletB, tokenAddress, walletB.getAddress(), 0n); + + // 2. Transfer funds from A to B via PXE A + let txHashTransfer1: TxHash; + const transfer1Amount = 654n; + { + ({ txHash: txHashTransfer1 } = await contractWithWalletA.methods + .transfer(walletA.getAddress(), walletB.getAddress(), transfer1Amount, 0) + .send() + .wait()); + + // Check balances and logs are as expected + await expectTokenBalance(walletA, tokenAddress, walletA.getAddress(), initialBalance - transfer1Amount); + await expectTokenBalance(walletB, tokenAddress, walletB.getAddress(), transfer1Amount); + await expectsNumOfNoteEncryptedLogsInTheLastBlockToBe(aztecNode, 2); + } + + // 3. 
Rotate B's key + let newNpkM: PublicKey; + { + const newNskM = Fq.random(); + newNpkM = derivePublicKeyFromSecretKey(newNskM); + await pxeB.rotateMasterNullifierKey(walletB.getAddress(), newNskM); + + await keyRegistryWithB.methods.rotate_npk_m(walletB.getAddress(), newNpkM, 0).send().wait(); + await crossDelay(); + } + + // 4. Transfer funds from A to B via PXE A + let txHashTransfer2: TxHash; + const transfer2Amount = 321n; + { + ({ txHash: txHashTransfer2 } = await contractWithWalletA.methods + .transfer(walletA.getAddress(), walletB.getAddress(), transfer2Amount, 0) + .send() + .wait()); + + await expectTokenBalance( + walletA, + tokenAddress, + walletA.getAddress(), + initialBalance - transfer1Amount - transfer2Amount, + ); + await expectTokenBalance(walletB, tokenAddress, walletB.getAddress(), transfer1Amount + transfer2Amount); + } + + // 5. Now we check that the correct nullifier keys were used in both transfers + { + await awaitUserSynchronized(walletB, walletB.getAddress()); + const transfer1Notes = await walletB.getNotes({ txHash: txHashTransfer1 }); + const transfer2Notes = await walletB.getNotes({ txHash: txHashTransfer2 }); + expect(transfer1Notes.length).toBe(1); + expect(transfer2Notes.length).toBe(1); + // Second field in the token note is the npk_m_hash + const noteNpkMHashTransfer1 = transfer1Notes[0].note.items[1]; + const noteNpkMHashTransfer2 = transfer2Notes[0].note.items[1]; + + // Now we check the note created in transfer 2 used the new npk_m_hash + expect(noteNpkMHashTransfer2.equals(newNpkM.hash())).toBe(true); + // We sanity check that the note created in transfer 1 had the old npk_m_hash by checking it's different from the new + // one + expect(noteNpkMHashTransfer2.equals(noteNpkMHashTransfer1)).toBe(false); + } + + // 6. Finally we check that all the B notes are spendable by transferring the full B balance to A + // --> this way we verify that it's possible to obtain both keys via oracles + { + await contractWithWalletB.methods + .transfer(walletB.getAddress(), walletA.getAddress(), transfer1Amount + transfer2Amount, 0) + .send() + .wait(); + + await expectTokenBalance(walletA, tokenAddress, walletA.getAddress(), initialBalance); + await expectTokenBalance(walletB, tokenAddress, walletB.getAddress(), 0n); + } + }, 600_000); +}); diff --git a/yarn-project/end-to-end/src/e2e_multiple_accounts_1_enc_key.test.ts b/yarn-project/end-to-end/src/e2e_multiple_accounts_1_enc_key.test.ts index 25b146be25a..c993171e774 100644 --- a/yarn-project/end-to-end/src/e2e_multiple_accounts_1_enc_key.test.ts +++ b/yarn-project/end-to-end/src/e2e_multiple_accounts_1_enc_key.test.ts @@ -15,7 +15,7 @@ import { } from '@aztec/aztec.js'; import { TokenContract } from '@aztec/noir-contracts.js/Token'; -import { expectsNumOfEncryptedLogsInTheLastBlockToBe, setup } from './fixtures/utils.js'; +import { expectsNumOfNoteEncryptedLogsInTheLastBlockToBe, setup } from './fixtures/utils.js'; describe('e2e_multiple_accounts_1_enc_key', () => { let aztecNode: AztecNode | undefined; @@ -109,7 +109,7 @@ describe('e2e_multiple_accounts_1_enc_key', () => { await expectBalance(i, expectedBalances[i]); } - await expectsNumOfEncryptedLogsInTheLastBlockToBe(aztecNode, 2); + await expectsNumOfNoteEncryptedLogsInTheLastBlockToBe(aztecNode, 2); logger.info(`Transfer ${transferAmount} from ${sender} to ${receiver} successful`); }; diff --git a/yarn-project/end-to-end/src/e2e_pending_note_hashes_contract.test.ts b/yarn-project/end-to-end/src/e2e_pending_note_hashes_contract.test.ts index 4b49fa43f6b..ebfc03a48a3 100644
--- a/yarn-project/end-to-end/src/e2e_pending_note_hashes_contract.test.ts +++ b/yarn-project/end-to-end/src/e2e_pending_note_hashes_contract.test.ts @@ -1,6 +1,7 @@ import { type AztecAddress, type AztecNode, type DebugLogger, Fr, type Wallet } from '@aztec/aztec.js'; import { PendingNoteHashesContract } from '@aztec/noir-contracts.js/PendingNoteHashes'; +import { EncryptedTxL2Logs } from '../../circuit-types/src/logs/tx_l2_logs.js'; import { setup } from './fixtures/utils.js'; describe('e2e_pending_note_hashes_contract', () => { @@ -50,6 +51,20 @@ describe('e2e_pending_note_hashes_contract', () => { } }; + const expectNoteLogsSquashedExcept = async (exceptFirstFew: number) => { + const blockNum = await aztecNode.getBlockNumber(); + const block = (await aztecNode.getBlocks(blockNum, 1))[0]; + + const logArray = block.body.txEffects.flatMap(txEffect => txEffect.noteEncryptedLogs); + + for (let l = 0; l < exceptFirstFew + 1; l++) { + expect(logArray[l]).not.toEqual(EncryptedTxL2Logs.empty()); + } + for (let l = exceptFirstFew + 1; l < logArray.length; l++) { + expect(logArray[l]).toEqual(EncryptedTxL2Logs.empty()); + } + }; + const deployContract = async () => { logger.debug(`Deploying L2 contract...`); contract = await PendingNoteHashesContract.deploy(wallet).send().deployed(); @@ -87,6 +102,7 @@ describe('e2e_pending_note_hashes_contract', () => { await expectNoteHashesSquashedExcept(0); await expectNullifiersSquashedExcept(0); + await expectNoteLogsSquashedExcept(0); }); it('Squash! Aztec.nr function can "create" 2 notes and "nullify" both in the same TX', async () => { @@ -108,6 +124,7 @@ describe('e2e_pending_note_hashes_contract', () => { await expectNoteHashesSquashedExcept(0); await expectNullifiersSquashedExcept(0); + await expectNoteLogsSquashedExcept(0); }); it('Squash! Aztec.nr function can "create" 2 notes and "nullify" 1 in the same TX (kernel will squash one note + nullifier)', async () => { @@ -130,6 +147,7 @@ describe('e2e_pending_note_hashes_contract', () => { await expectNoteHashesSquashedExcept(1); await expectNullifiersSquashedExcept(0); + await expectNoteLogsSquashedExcept(1); }); it('Squash! Aztec.nr function can nullify a pending note and a persistent in the same TX', async () => { @@ -147,6 +165,7 @@ describe('e2e_pending_note_hashes_contract', () => { await expectNoteHashesSquashedExcept(1); // first TX just creates 1 persistent note await expectNullifiersSquashedExcept(0); + await expectNoteLogsSquashedExcept(1); // create another note, and nullify it and AND nullify the above-created note in the same TX await deployedContract.methods @@ -167,6 +186,7 @@ describe('e2e_pending_note_hashes_contract', () => { // the nullifier corresponding to this transient note is squashed, but the // other nullifier corresponding to the persistent note becomes persistent itself. await expectNullifiersSquashedExcept(1); + await expectNoteLogsSquashedExcept(0); }); it('get_notes function filters a nullified note created in a previous transaction', async () => { @@ -182,6 +202,7 @@ describe('e2e_pending_note_hashes_contract', () => { // There is a single new note hash. 
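// [Illustrative sketch, not part of the diff] The expect*SquashedExcept helpers in
// this suite all share one shape: the first few entries must be non-empty, and every
// later entry must have been squashed to empty. A generic version (assumed
// simplification; the real helpers compare against EncryptedTxL2Logs.empty() and
// count note hashes/nullifiers), using Jest's global `expect`:
function expectSquashedExceptSketch<T>(items: T[], keep: number, isEmpty: (item: T) => boolean): void {
  items.forEach((item, i) => {
    // Entries below `keep` must survive; everything after must be squashed.
    expect(isEmpty(item)).toBe(i >= keep);
  });
}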
await expectNoteHashesSquashedExcept(1); + await expectNoteLogsSquashedExcept(1); await deployedContract.methods .test_insert_then_get_then_nullify_all_in_nested_calls( diff --git a/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/deposits.test.ts b/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/deposits.test.ts index 72c5c4c0aad..306e9e7bb06 100644 --- a/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/deposits.test.ts +++ b/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/deposits.test.ts @@ -1,8 +1,6 @@ -import { Fr, L1Actor, L1ToL2Message, L2Actor, computeAuthWitMessageHash } from '@aztec/aztec.js'; -import { sha256ToField } from '@aztec/foundation/crypto'; - -import { toFunctionSelector } from 'viem'; +import { Fr, computeAuthWitMessageHash } from '@aztec/aztec.js'; +import { NO_L1_TO_L2_MSG_ERROR } from '../fixtures/fixtures.js'; import { PublicCrossChainMessagingContractTest } from './public_cross_chain_messaging_contract_test.js'; describe('e2e_public_cross_chain_messaging deposits', () => { @@ -122,18 +120,6 @@ describe('e2e_public_cross_chain_messaging deposits', () => { await crossChainTestHarness.makeMessageConsumable(msgHash); - const content = sha256ToField([ - Buffer.from(toFunctionSelector('mint_public(bytes32,uint256)').substring(2), 'hex'), - user2Wallet.getAddress(), - new Fr(bridgeAmount), - ]); - const wrongMessage = new L1ToL2Message( - new L1Actor(crossChainTestHarness.tokenPortalAddress, crossChainTestHarness.publicClient.chain.id), - new L2Actor(l2Bridge.address, 1), - content, - secretHash, - ); - // get message leaf index, needed for claiming in public const maybeIndexAndPath = await aztecNode.getL1ToL2MessageMembershipWitness('latest', msgHash, 0n); expect(maybeIndexAndPath).toBeDefined(); @@ -145,7 +131,7 @@ describe('e2e_public_cross_chain_messaging deposits', () => { .withWallet(user2Wallet) .methods.claim_public(user2Wallet.getAddress(), bridgeAmount, secret, messageLeafIndex) .prove(), - ).rejects.toThrow(`No non-nullified L1 to L2 message found for message hash ${wrongMessage.hash().toString()}`); + ).rejects.toThrow(NO_L1_TO_L2_MSG_ERROR); // user2 consumes owner's L1-> L2 message on bridge contract and mints public tokens on L2 logger.info("user2 consumes owner's message on L2 Publicly"); diff --git a/yarn-project/end-to-end/src/fixtures/fixtures.ts b/yarn-project/end-to-end/src/fixtures/fixtures.ts index 23e0b482f27..ec11d1ae340 100644 --- a/yarn-project/end-to-end/src/fixtures/fixtures.ts +++ b/yarn-project/end-to-end/src/fixtures/fixtures.ts @@ -6,5 +6,7 @@ export const privateKey2 = Buffer.from('59c6995e998f97a5a0044966f0945389dc9e86da export const U128_UNDERFLOW_ERROR = "Assertion failed: attempt to subtract with underflow 'hi == high'"; export const U128_OVERFLOW_ERROR = "Assertion failed: attempt to add with overflow 'hi == high'"; export const BITSIZE_TOO_BIG_ERROR = "'self.__assert_max_bit_size(bit_size)'"; -// TODO(https://github.com/AztecProtocol/aztec-packages/issues/5818): Make this a fixed error after transition. -export const DUPLICATE_NULLIFIER_ERROR = /Transaction .*|.*duplicate nullifier.*/; +// TODO(https://github.com/AztecProtocol/aztec-packages/issues/5818): Make these a fixed error after transition. 
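// [Illustrative sketch, not part of the diff] While issue 5818 is open, the same
// double-spend can surface as "dropped" (current public VM) or "reverted" (AVM), so
// the shared matchers below are regexes rather than fixed strings. Jest's .toThrow
// accepts a RegExp, so one assertion covers every wording (`failingCall` is a
// hypothetical stand-in):
it('matches any of the transitional error strings', async () => {
  const failingCall = () => Promise.reject(new Error('Transaction reverted: duplicate nullifier 0x01'));
  await expect(failingCall()).rejects.toThrow(/dropped|duplicate nullifier|reverted/);
});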
+export const DUPLICATE_NULLIFIER_ERROR = /dropped|duplicate nullifier|reverted/; +export const NO_L1_TO_L2_MSG_ERROR = + /No non-nullified L1 to L2 message found for message hash|Tried to consume nonexistent L1-to-L2 message/; diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index f7bb329fd92..69773a10de4 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -490,7 +490,7 @@ export function getLogger() { * @param aztecNode - The instance of aztec node for retrieving the logs. * @param numEncryptedLogs - The number of expected logs. */ -export const expectsNumOfEncryptedLogsInTheLastBlockToBe = async ( +export const expectsNumOfNoteEncryptedLogsInTheLastBlockToBe = async ( aztecNode: AztecNode | undefined, numEncryptedLogs: number, ) => { @@ -500,7 +500,7 @@ export const expectsNumOfEncryptedLogsInTheLastBlockToBe = async ( return; } const l2BlockNum = await aztecNode.getBlockNumber(); - const encryptedLogs = await aztecNode.getLogs(l2BlockNum, 1, LogType.ENCRYPTED); + const encryptedLogs = await aztecNode.getLogs(l2BlockNum, 1, LogType.NOTEENCRYPTED); const unrolledLogs = EncryptedL2BlockL2Logs.unrollLogs(encryptedLogs); expect(unrolledLogs.length).toBe(numEncryptedLogs); }; diff --git a/yarn-project/end-to-end/src/flakey_e2e_account_init_fees.test.ts b/yarn-project/end-to-end/src/flakey_e2e_account_init_fees.test.ts index 6cdcf2ba2e8..b6cb50f4458 100644 --- a/yarn-project/end-to-end/src/flakey_e2e_account_init_fees.test.ts +++ b/yarn-project/end-to-end/src/flakey_e2e_account_init_fees.test.ts @@ -1,11 +1,14 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { type AccountManager, + type AztecNode, type DebugLogger, + type DeployL1Contracts, ExtendedNote, Fr, NativeFeePaymentMethod, Note, + type PXE, PrivateFeePaymentMethod, PublicFeePaymentMethod, Schnorr, @@ -25,14 +28,7 @@ import { import { jest } from '@jest/globals'; -import { - type BalancesFn, - type EndToEndContext, - expectMapping, - getBalancesFn, - publicDeployAccounts, - setup, -} from './fixtures/utils.js'; +import { type BalancesFn, expectMapping, getBalancesFn, publicDeployAccounts, setup } from './fixtures/utils.js'; import { GasPortalTestingHarnessFactory, type IGasBridgingTestHarness } from './shared/gas_portal_test_harness.js'; const TOKEN_NAME = 'BananaCoin'; @@ -43,8 +39,11 @@ const BRIDGED_FPC_GAS = BigInt(10e12); jest.setTimeout(1_000_000); describe('e2e_fees_account_init', () => { - let ctx: EndToEndContext; let logger: DebugLogger; + let aztecNode: AztecNode; + let pxe: PXE; + let teardown: () => Promise; + let sequencer: Wallet; let sequencersAddress: AztecAddress; let alice: Wallet; @@ -84,22 +83,24 @@ describe('e2e_fees_account_init', () => { } beforeAll(async () => { - ctx = await setup(2, {}, {}, true); - logger = ctx.logger; - [sequencer, alice] = ctx.wallets; + let wallets: Wallet[]; + let wallet: Wallet; + let deployL1ContractsValues: DeployL1Contracts; + ({ logger, wallets, wallet, aztecNode, pxe, deployL1ContractsValues } = await setup(2, {}, {}, true)); + [sequencer, alice] = wallets; sequencersAddress = sequencer.getAddress(); - await ctx.aztecNode.setConfig({ + await aztecNode.setConfig({ feeRecipient: sequencersAddress, }); gasBridgeTestHarness = await GasPortalTestingHarnessFactory.create({ - aztecNode: ctx.aztecNode, - pxeService: ctx.pxe, - publicClient: ctx.deployL1ContractsValues.publicClient, - walletClient: ctx.deployL1ContractsValues.walletClient, - wallet: 
ctx.wallet, - logger: ctx.logger, + aztecNode, + pxeService: pxe, + publicClient: deployL1ContractsValues.publicClient, + walletClient: deployL1ContractsValues.walletClient, + wallet, + logger, mockL1: false, }); @@ -122,7 +123,7 @@ describe('e2e_fees_account_init', () => { gasBalances = getBalancesFn('⛽', gas.methods.balance_of_public, logger); }); - afterAll(() => ctx.teardown()); + afterAll(() => teardown()); beforeEach(() => { gasSettings = GasSettings.default(); @@ -130,7 +131,7 @@ describe('e2e_fees_account_init', () => { actualFee = 1n; bobsSecretKey = Fr.random(); bobsPrivateSigningKey = Fq.random(); - bobsAccountManager = getSchnorrAccount(ctx.pxe, bobsSecretKey, bobsPrivateSigningKey, Fr.random()); + bobsAccountManager = getSchnorrAccount(pxe, bobsSecretKey, bobsPrivateSigningKey, Fr.random()); bobsAddress = bobsAccountManager.getCompleteAddress().address; }); @@ -292,7 +293,7 @@ describe('e2e_fees_account_init', () => { const completeAddress = CompleteAddress.fromSecretKeyAndInstance(bobsSecretKey, instance); // alice registers the keys in the PXE - await ctx.pxe.registerRecipient(completeAddress); + await pxe.registerRecipient(completeAddress); // and deploys bob's account, paying the fee from her balance const publicKeysHash = deriveKeys(bobsSecretKey).publicKeys.hash(); @@ -339,6 +340,6 @@ describe('e2e_fees_account_init', () => { const note = new Note([new Fr(amount), secretHash]); // this note isn't encrypted but we need to provide a registered public key const extendedNote = new ExtendedNote(note, owner, bananaCoin.address, storageSlot, noteTypeId, txHash); - await ctx.pxe.addNote(extendedNote); + await pxe.addNote(extendedNote); } }); diff --git a/yarn-project/foundation/src/fields/point.ts b/yarn-project/foundation/src/fields/point.ts index dfb5065c03d..9fd8e8fdf03 100644 --- a/yarn-project/foundation/src/fields/point.ts +++ b/yarn-project/foundation/src/fields/point.ts @@ -1,3 +1,4 @@ +import { poseidon2Hash } from '../crypto/index.js'; import { BufferReader, FieldReader, serializeToBuffer } from '../serialize/index.js'; import { Fr } from './fields.js'; @@ -129,6 +130,10 @@ export class Point { isZero() { return this.x.isZero() && this.y.isZero(); } + + hash() { + return poseidon2Hash(this.toFields()); + } } /** diff --git a/yarn-project/key-store/src/test_key_store.test.ts b/yarn-project/key-store/src/test_key_store.test.ts index 2395dbf1472..1dde6a3f02b 100644 --- a/yarn-project/key-store/src/test_key_store.test.ts +++ b/yarn-project/key-store/src/test_key_store.test.ts @@ -1,4 +1,11 @@ -import { AztecAddress, Fr } from '@aztec/circuits.js'; +import { + AztecAddress, + Fq, + Fr, + computeAppNullifierSecretKey, + deriveKeys, + derivePublicKeyFromSecretKey, +} from '@aztec/circuits.js'; import { openTmpStore } from '@aztec/kv-store/utils'; import { TestKeyStore } from './test_key_store.js'; @@ -9,6 +16,10 @@ describe('TestKeyStore', () => { // Arbitrary fixed values const sk = new Fr(8923n); + const keys = deriveKeys(sk); + const derivedMasterNullifierPublicKey = derivePublicKeyFromSecretKey(keys.masterNullifierSecretKey); + const computedMasterNullifierPublicKeyHash = derivedMasterNullifierPublicKey.hash(); + const partialAddress = new Fr(243523n); const { address: accountAddress } = await keyStore.addAccount(sk, partialAddress); @@ -16,7 +27,7 @@ describe('TestKeyStore', () => { `"0x1a8a9a1d91cbb353d8df4f1bbfd0283f7fc63766f671edd9443a1270a7b2a954"`, ); - const masterNullifierPublicKey = await keyStore.getMasterNullifierPublicKey(accountAddress); + const 
masterNullifierPublicKey = await keyStore.getMasterNullifierPublicKey(computedMasterNullifierPublicKeyHash); expect(masterNullifierPublicKey.toString()).toMatchInlineSnapshot( `"0x2ef5d15dd65d29546680ab72846fb071f41cb9f2a0212215e6c560e29df4ff650ce764818364b376be92dc2f49577fe440e64a16012584f7c4ee94f7edbc323a"`, ); @@ -44,7 +55,10 @@ describe('TestKeyStore', () => { // Arbitrary app contract address const appAddress = AztecAddress.fromBigInt(624n); - const appNullifierSecretKey = await keyStore.getAppNullifierSecretKey(accountAddress, appAddress); + const appNullifierSecretKey = await keyStore.getAppNullifierSecretKey( + computedMasterNullifierPublicKeyHash, + appAddress, + ); expect(appNullifierSecretKey.toString()).toMatchInlineSnapshot( `"0x230a44dfe7cfec7a735c89f7289c5cb5d2c3dc0bf5d3505917fd2476f67873a8"`, ); @@ -71,4 +85,77 @@ describe('TestKeyStore', () => { `"0x0fde74d5e504c73b58aad420dd72590fc6004571411e7f77c45378714195a52b"`, ); }); + + it('nullifier key rotation tests', async () => { + const keyStore = new TestKeyStore(openTmpStore()); + + // Arbitrary fixed values + const sk = new Fr(8923n); + const partialAddress = new Fr(243523n); + + const { address: accountAddress } = await keyStore.addAccount(sk, partialAddress); + expect(accountAddress.toString()).toMatchInlineSnapshot( + `"0x1a8a9a1d91cbb353d8df4f1bbfd0283f7fc63766f671edd9443a1270a7b2a954"`, + ); + + // Arbitrary fixed values + const newMasterNullifierSecretKeys = [new Fq(420n), new Fq(69n), new Fq(42069n)]; + const newDerivedMasterNullifierPublicKeys = [ + derivePublicKeyFromSecretKey(newMasterNullifierSecretKeys[0]), + derivePublicKeyFromSecretKey(newMasterNullifierSecretKeys[1]), + derivePublicKeyFromSecretKey(newMasterNullifierSecretKeys[2]), + ]; + + const newComputedMasterNullifierPublicKeyHashes = [ + newDerivedMasterNullifierPublicKeys[0].hash(), + newDerivedMasterNullifierPublicKeys[1].hash(), + newDerivedMasterNullifierPublicKeys[2].hash(), + ]; + + // We rotate our nullifier key + await keyStore.rotateMasterNullifierKey(accountAddress, newMasterNullifierSecretKeys[0]); + await keyStore.rotateMasterNullifierKey(accountAddress, newMasterNullifierSecretKeys[1]); + await keyStore.rotateMasterNullifierKey(accountAddress, newMasterNullifierSecretKeys[2]); + + // We make sure we can get master nullifier public keys with master nullifier public key hashes + expect(await keyStore.getMasterNullifierPublicKey(newComputedMasterNullifierPublicKeyHashes[2])).toEqual( + newDerivedMasterNullifierPublicKeys[2], + ); + expect(await keyStore.getMasterNullifierPublicKey(newComputedMasterNullifierPublicKeyHashes[1])).toEqual( + newDerivedMasterNullifierPublicKeys[1], + ); + expect(await keyStore.getMasterNullifierPublicKey(newComputedMasterNullifierPublicKeyHashes[0])).toEqual( + newDerivedMasterNullifierPublicKeys[0], + ); + + // Arbitrary app contract address + const appAddress = AztecAddress.fromBigInt(624n); + + // We make sure we can get app nullifier secret keys with master nullifier public key hashes + const appNullifierSecretKey0 = await keyStore.getAppNullifierSecretKey( + newComputedMasterNullifierPublicKeyHashes[0], + appAddress, + ); + expect(appNullifierSecretKey0.toString()).toMatchInlineSnapshot( + `"0x296e42f1039b62290372d608fcab55b00a3f96c1c8aa347b2a830639c5a12757"`, + ); + const appNullifierSecretKey1 = await keyStore.getAppNullifierSecretKey( + newComputedMasterNullifierPublicKeyHashes[1], + appAddress, + ); + expect(appNullifierSecretKey1.toString()).toMatchInlineSnapshot( + 
`"0x019f2a705b68683f1d86da639a543411fa779af41896c3920d0c2d5226c686dd"`, + ); + const appNullifierSecretKey2 = await keyStore.getAppNullifierSecretKey( + newComputedMasterNullifierPublicKeyHashes[2], + appAddress, + ); + expect(appNullifierSecretKey2.toString()).toMatchInlineSnapshot( + `"0x117445c8819c06b9a0889e5cce1f550e32ec6993c23f57bc9fc5cda05df520ae"`, + ); + + expect(appNullifierSecretKey0).toEqual(computeAppNullifierSecretKey(newMasterNullifierSecretKeys[0], appAddress)); + expect(appNullifierSecretKey1).toEqual(computeAppNullifierSecretKey(newMasterNullifierSecretKeys[1], appAddress)); + expect(appNullifierSecretKey2).toEqual(computeAppNullifierSecretKey(newMasterNullifierSecretKeys[2], appAddress)); + }); }); diff --git a/yarn-project/key-store/src/test_key_store.ts b/yarn-project/key-store/src/test_key_store.ts index 2d0e0672104..6e5416be404 100644 --- a/yarn-project/key-store/src/test_key_store.ts +++ b/yarn-project/key-store/src/test_key_store.ts @@ -2,6 +2,7 @@ import { type KeyStore, type PublicKey } from '@aztec/circuit-types'; import { AztecAddress, CompleteAddress, + Fq, Fr, GeneratorIndex, type GrumpkinPrivateKey, @@ -11,6 +12,7 @@ import { computeAddress, computeAppNullifierSecretKey, deriveKeys, + derivePublicKeyFromSecretKey, } from '@aztec/circuits.js'; import { poseidon2Hash } from '@aztec/foundation/crypto'; import { type AztecKVStore, type AztecMap } from '@aztec/kv-store'; @@ -58,16 +60,23 @@ export class TestKeyStore implements KeyStore { await this.#keys.set(`${accountAddress.toString()}-public_keys_hash`, publicKeysHash.toBuffer()); // Naming of keys is as follows ${from}-${to}_m - await this.#keys.set(`${accountAddress.toString()}-nsk_m`, masterNullifierSecretKey.toBuffer()); await this.#keys.set(`${accountAddress.toString()}-ivsk_m`, masterIncomingViewingSecretKey.toBuffer()); await this.#keys.set(`${accountAddress.toString()}-ovsk_m`, masterOutgoingViewingSecretKey.toBuffer()); await this.#keys.set(`${accountAddress.toString()}-tsk_m`, masterTaggingSecretKey.toBuffer()); + // The key of the following is different from the others because the buffer can store multiple keys + await this.#keys.set(`${accountAddress.toString()}-ns_keys_m`, masterNullifierSecretKey.toBuffer()); - await this.#keys.set(`${accountAddress.toString()}-npk_m`, publicKeys.masterNullifierPublicKey.toBuffer()); + await this.#keys.set(`${accountAddress.toString()}-np_keys_m`, publicKeys.masterNullifierPublicKey.toBuffer()); await this.#keys.set(`${accountAddress.toString()}-ivpk_m`, publicKeys.masterIncomingViewingPublicKey.toBuffer()); await this.#keys.set(`${accountAddress.toString()}-ovpk_m`, publicKeys.masterOutgoingViewingPublicKey.toBuffer()); await this.#keys.set(`${accountAddress.toString()}-tpk_m`, publicKeys.masterTaggingPublicKey.toBuffer()); + // We store a npk_m_hash-account_address map to make address easy to obtain with the hash later on + await this.#keys.set( + `${publicKeys.masterNullifierPublicKey.hash().toString()}-npk_m_hash`, + accountAddress.toBuffer(), + ); + // At last, we return the newly derived account address return Promise.resolve(new CompleteAddress(accountAddress, publicKeys, partialAddress)); } @@ -78,28 +87,49 @@ export class TestKeyStore implements KeyStore { */ public getAccounts(): Promise { const allMapKeys = Array.from(this.#keys.keys()); - // We return account addresses based on the map keys that end with '-nsk_m' - const accounts = allMapKeys.filter(key => key.endsWith('-nsk_m')).map(key => key.split('-')[0]); + // We return account addresses based on 
the map keys that end with '-ivsk_m' + const accounts = allMapKeys.filter(key => key.endsWith('-ivsk_m')).map(key => key.split('-')[0]); return Promise.resolve(accounts.map(account => AztecAddress.fromString(account))); } /** - * Gets the master nullifier public key for a given account or master nullifier public key hash. - * @throws If the account does not exist in the key store. - * @param accountOrNpkMHash - account address or master nullifier public key hash. + * Gets the master nullifier public key for a given master nullifier public key hash. + * @throws If the account corresponding to the master nullifier public key hash does not exist in the key store. + * @param npkMHash - The master nullifier public key hash. * @returns The master nullifier public key for the account. */ - public async getMasterNullifierPublicKey(accountOrNpkMHash: AztecAddress | Fr): Promise { - const masterNullifierPublicKeyBuffer = - this.#keys.get(`${accountOrNpkMHash.toString()}-npk_m`) ?? - this.#keys.get(`${this.#getAccountAddressForMasterNullifierPublicKeyHash(accountOrNpkMHash)?.toString()}-npk_m`); + public getMasterNullifierPublicKey(npkMHash: Fr): Promise { + // Get the address for npk_m_hash + const accountAddressBuffer = this.#keys.get(`${npkMHash.toString()}-npk_m_hash`); + if (!accountAddressBuffer) { + throw new Error(`Could not find address for master nullifier public key hash ${npkMHash}.`); + } + const accountAddress = AztecAddress.fromBuffer(accountAddressBuffer); - if (!masterNullifierPublicKeyBuffer) { + // Get the master nullifier public keys buffer for the account + const masterNullifierPublicKeysBuffer = this.#keys.get(`${accountAddress.toString()}-np_keys_m`); + if (!masterNullifierPublicKeysBuffer) { throw new Error( - `Account or master nullifier public key hash ${accountOrNpkMHash} does not exist. Registered accounts: ${await this.getAccounts()}.`, + `Could not find master nullifier public key for account ${accountAddress.toString()} whose address was successfully obtained with npk_m_hash ${npkMHash.toString()}.`, ); } - return Promise.resolve(Point.fromBuffer(masterNullifierPublicKeyBuffer)); + + // We check that the buffer's length is a multiple of Point.SIZE_IN_BYTES + if (masterNullifierPublicKeysBuffer.byteLength % Point.SIZE_IN_BYTES !== 0) { + throw new Error("Master nullifier public key buffer's length is not a multiple of Point.SIZE_IN_BYTES."); + } + + // Now we iterate over the public keys in the buffer to find the one that matches the hash + const numKeys = masterNullifierPublicKeysBuffer.byteLength / Point.SIZE_IN_BYTES; + for (let i = 0; i < numKeys; i++) { + const masterNullifierPublicKey = Point.fromBuffer( + masterNullifierPublicKeysBuffer.subarray(i * Point.SIZE_IN_BYTES, (i + 1) * Point.SIZE_IN_BYTES), + ); + if (masterNullifierPublicKey.hash().equals(npkMHash)) { + return Promise.resolve(masterNullifierPublicKey); + } + } + throw new Error(`Could not find master nullifier public key for npk_m_hash ${npkMHash.toString()}.`); } /** @@ -151,25 +181,47 @@ } /** - * Derives and returns the application nullifier secret key for a given account or master nullifier public key hash.
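// [Illustrative sketch, not part of the diff] The shape of app siloing: one master
// nullifier secret deterministically yields a distinct secret per app, so leaking an
// app-siloed key never exposes the master key. SHA-256 is only a stand-in here; the
// real computeAppNullifierSecretKey in @aztec/circuits.js hashes field elements
// (Poseidon2-based) rather than raw bytes.
import { createHash } from 'crypto';

function deriveAppSecretSketch(masterSecret: Buffer, appAddress: Buffer): Buffer {
  return createHash('sha256').update(masterSecret).update(appAddress).digest();
}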
+ * @throws If the account corresponding to the master nullifier public key hash does not exist in the key store. + * @param npkMHash - The master nullifier public key hash. * @param app - The application address to retrieve the nullifier secret key for. * @returns A Promise that resolves to the application nullifier secret key. */ - public async getAppNullifierSecretKey(accountOrNpkMHash: AztecAddress | Fr, app: AztecAddress): Promise { - const masterNullifierSecretKeyBuffer = - this.#keys.get(`${accountOrNpkMHash.toString()}-nsk_m`) ?? - this.#keys.get(`${this.#getAccountAddressForMasterNullifierPublicKeyHash(accountOrNpkMHash)?.toString()}-nsk_m`); + public getAppNullifierSecretKey(npkMHash: Fr, app: AztecAddress): Promise { + // First we get the account address for npk_m_hash + const accountAddressBuffer = this.#keys.get(`${npkMHash.toString()}-npk_m_hash`); + if (!accountAddressBuffer) { + throw new Error(`Could not find address for master nullifier public key hash ${npkMHash}.`); + } - if (!masterNullifierSecretKeyBuffer) { + // Now we get the master nullifier secret keys for the account + const masterNullifierSecretKeysBuffer = this.#keys.get( + `${AztecAddress.fromBuffer(accountAddressBuffer).toString()}-ns_keys_m`, + ); + if (!masterNullifierSecretKeysBuffer) { throw new Error( - `Account or master nullifier public key hash ${accountOrNpkMHash} does not exist. Registered accounts: ${await this.getAccounts()}.`, + `Could not find master nullifier secret keys for account ${AztecAddress.fromBuffer( + accountAddressBuffer, + ).toString()}`, ); } - const masterNullifierSecretKey = GrumpkinScalar.fromBuffer(masterNullifierSecretKeyBuffer); - const appNullifierSecretKey = computeAppNullifierSecretKey(masterNullifierSecretKey, app); - return Promise.resolve(appNullifierSecretKey); + + // Now we iterate over all the secret keys to find the one that matches the hash + const numKeys = masterNullifierSecretKeysBuffer.byteLength / GrumpkinScalar.SIZE_IN_BYTES; + for (let i = 0; i < numKeys; i++) { + const secretKey = GrumpkinScalar.fromBuffer( + masterNullifierSecretKeysBuffer.subarray( + i * GrumpkinScalar.SIZE_IN_BYTES, + (i + 1) * GrumpkinScalar.SIZE_IN_BYTES, + ), + ); + const publicKey = derivePublicKeyFromSecretKey(secretKey); + if (publicKey.hash().equals(npkMHash)) { + return Promise.resolve(computeAppNullifierSecretKey(secretKey, app)); + } + } + + throw new Error(`Could not find master nullifier secret key for npk_m_hash ${npkMHash.toString()}.`); } /** @@ -233,21 +285,60 @@ * @dev Used when feeding the master nullifier secret key to the kernel circuit for nullifier keys verification.
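// [Illustrative sketch, not part of the diff] Both lookups above scan a buffer of
// concatenated fixed-width keys until a candidate matches. Generic form of that scan;
// the stride would be Point.SIZE_IN_BYTES or GrumpkinScalar.SIZE_IN_BYTES in the
// real code:
function scanPackedKeysSketch(
  packed: Buffer,
  stride: number,
  matches: (candidate: Buffer) => boolean,
): Buffer | undefined {
  if (packed.byteLength % stride !== 0) {
    throw new Error(`Packed key buffer length ${packed.byteLength} is not a multiple of ${stride}.`);
  }
  for (let offset = 0; offset < packed.byteLength; offset += stride) {
    const candidate = packed.subarray(offset, offset + stride);
    if (matches(candidate)) {
      return candidate;
    }
  }
  return undefined;
}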
*/ public getMasterNullifierSecretKeyForPublicKey(masterNullifierPublicKey: PublicKey): Promise { - // We iterate over the map keys to find the account address that corresponds to the provided public key - for (const [key, value] of this.#keys.entries()) { - if (value.equals(masterNullifierPublicKey.toBuffer()) && key.endsWith('-npk_m')) { - // We extract the account address from the map key - const accountAddress = key.split('-')[0]; - // We fetch the secret key and return it - const masterNullifierSecretKeyBuffer = this.#keys.get(`${accountAddress.toString()}-nsk_m`); - if (!masterNullifierSecretKeyBuffer) { - throw new Error(`Could not find master nullifier secret key for account ${accountAddress.toString()}`); - } - return Promise.resolve(GrumpkinScalar.fromBuffer(masterNullifierSecretKeyBuffer)); + // We get the account address associated with the master nullifier public key hash + const accountAddressBuffer = this.#keys.get(`${masterNullifierPublicKey.hash().toString()}-npk_m_hash`); + if (!accountAddressBuffer) { + throw new Error( + `Could not find account address for master nullifier public key ${masterNullifierPublicKey.toString()}`, + ); + } + const accountAddress = AztecAddress.fromBuffer(accountAddressBuffer); + + // We fetch the public keys and find this specific public key's position in the buffer + const masterNullifierPublicKeysBuffer = this.#keys.get(`${accountAddress.toString()}-np_keys_m`); + if (!masterNullifierPublicKeysBuffer) { + throw new Error(`Could not find master nullifier public keys for account ${accountAddress.toString()}`); + } + + // We check that the buffer's length is a multiple of Point.SIZE_IN_BYTES + if (masterNullifierPublicKeysBuffer.byteLength % Point.SIZE_IN_BYTES !== 0) { + throw new Error("Master nullifier public key buffer's length is not a multiple of Point.SIZE_IN_BYTES."); + } + + // Now we iterate over the public keys in the buffer to find the one that matches the hash + const numKeys = masterNullifierPublicKeysBuffer.byteLength / Point.SIZE_IN_BYTES; + let keyIndex = -1; + for (let i = 0; i < numKeys; i++) { + const publicKey = Point.fromBuffer( + masterNullifierPublicKeysBuffer.subarray(i * Point.SIZE_IN_BYTES, (i + 1) * Point.SIZE_IN_BYTES), + ); + if (publicKey.equals(masterNullifierPublicKey)) { + keyIndex = i; + break; } } - throw new Error(`Could not find master nullifier secret key for public key ${masterNullifierPublicKey.toString()}`); + // We bail out early if no key matched, rather than slicing the buffer with keyIndex = -1 + if (keyIndex === -1) { + throw new Error( + `Could not find master nullifier public key ${masterNullifierPublicKey.toString()} for account ${accountAddress.toString()}.`, + ); + } + + // Now we fetch the secret keys buffer and extract the secret key at the same index + const masterNullifierSecretKeysBuffer = this.#keys.get(`${accountAddress.toString()}-ns_keys_m`); + if (!masterNullifierSecretKeysBuffer) { + throw new Error(`Could not find master nullifier secret keys for account ${accountAddress.toString()}`); + } + + // We extract the secret key from the buffer + const secretKeyBuffer = masterNullifierSecretKeysBuffer.subarray( + keyIndex * GrumpkinScalar.SIZE_IN_BYTES, + (keyIndex + 1) * GrumpkinScalar.SIZE_IN_BYTES, + ); + const secretKey = GrumpkinScalar.fromBuffer(secretKeyBuffer); + + // We sanity check that it's possible to derive the public key from the secret key + if (!derivePublicKeyFromSecretKey(secretKey).equals(masterNullifierPublicKey)) { + throw new Error( + `Could not find master nullifier secret key for public key ${masterNullifierPublicKey.toString()}`, + ); + } + + return Promise.resolve(secretKey); } /** @@ -296,18 +387,41 @@ return Promise.resolve(Fr.fromBuffer(publicKeysHashBuffer)); } -
#getAccountAddressForMasterNullifierPublicKeyHash(masterNullifierPublicKeyHash: Fr): AztecAddress | undefined { - for (const [key, value] of this.#keys.entries()) { - if (key.endsWith('-npk_m')) { - const computedMasterNullifierPublicKeyHash = poseidon2Hash(Point.fromBuffer(value).toFields()); - if (computedMasterNullifierPublicKeyHash.equals(masterNullifierPublicKeyHash)) { - // We extract the account address from the map key - const accountAddress = key.split('-')[0]; - return AztecAddress.fromString(accountAddress); - } - } + /** + * Rotates the master nullifier key for the specified account. + * + * @dev This function updates the secret and public keys associated with the account. + * It appends a new secret key to the existing secret keys, derives the + * corresponding public key, and updates the stored keys accordingly. + * + * @param account - The account address for which the master nullifier key is being rotated. + * @param newSecretKey - (Optional) A new secret key of type Fq. If not provided, a random key is generated. + * @throws If the account does not have existing nullifier secret keys or public keys. + * @returns A Promise that resolves when the key rotation is complete. + */ + public async rotateMasterNullifierKey(account: AztecAddress, newSecretKey: Fq = Fq.random()) { + // We fetch the existing secret keys for the account + const secretKeysBuffer = this.#keys.get(`${account.toString()}-ns_keys_m`); + if (!secretKeysBuffer) { + throw new Error(`Could not find nullifier secret keys for account ${account.toString()}`); + } + + // We append the new secret key to the buffer of secret keys + const newSecretKeysBuffer = Buffer.concat([secretKeysBuffer, newSecretKey.toBuffer()]); + await this.#keys.set(`${account.toString()}-ns_keys_m`, newSecretKeysBuffer); + + // Now we derive the public key from the new secret key and append it to the buffer of original public keys + const newPublicKey = derivePublicKeyFromSecretKey(newSecretKey); + const publicKeysBuffer = this.#keys.get(`${account.toString()}-np_keys_m`); + if (!publicKeysBuffer) { + throw new Error(`Could not find nullifier public keys for account ${account.toString()}`); } - return undefined; + // We append the new public key to the buffer of public keys + const newPublicKeysBuffer = Buffer.concat([publicKeysBuffer, newPublicKey.toBuffer()]); + await this.#keys.set(`${account.toString()}-np_keys_m`, newPublicKeysBuffer); + + // We store a npk_m_hash-account_address map to make address easy to obtain with the hash later on + await this.#keys.set(`${newPublicKey.hash().toString()}-npk_m_hash`, account.toBuffer()); } } diff --git a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts index 380d7ee8e0d..750d1909db5 100644 --- a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts +++ b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts @@ -29,10 +29,12 @@ import { type KernelData, L2ToL1Message, type LeafDataReadHint, + LogHash, MAX_ENCRYPTED_LOGS_PER_TX, MAX_NEW_L2_TO_L1_MSGS_PER_TX, MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, + MAX_NOTE_ENCRYPTED_LOGS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, @@ -52,6 +54,7 @@ import { type NonMembershipHint, NoteHash, type NoteHashReadRequestHints, + NoteLogHash, Nullifier, NullifierKeyValidationRequest, type NullifierLeafPreimage, @@ -111,7 +114,6 @@ import {
ScopedNullifierKeyValidationRequest, ScopedReadRequest, type SettledReadHint, - SideEffect, type StateDiffHints, StateReference, TxContext, @@ -148,6 +150,7 @@ import type { KernelData as KernelDataNoir, L2ToL1Message as L2ToL1MessageNoir, LeafDataReadHint as LeafDataReadHintNoir, + LogHash as LogHashNoir, MaxBlockNumber as MaxBlockNumberNoir, MembershipWitness as MembershipWitnessNoir, MergeRollupInputs as MergeRollupInputsNoir, @@ -159,6 +162,7 @@ import type { NoteHash as NoteHashNoir, NoteHashReadRequestHints as NoteHashReadRequestHintsNoir, NoteHashSettledReadHint as NoteHashSettledReadHintNoir, + NoteLogHash as NoteLogHashNoir, NullifierKeyValidationRequest as NullifierKeyValidationRequestNoir, NullifierLeafPreimage as NullifierLeafPreimageNoir, Nullifier as NullifierNoir, @@ -213,7 +217,6 @@ import type { ScopedNullifierKeyValidationRequest as ScopedNullifierKeyValidationRequestNoir, ScopedNullifier as ScopedNullifierNoir, ScopedReadRequest as ScopedReadRequestNoir, - SideEffect as SideEffectNoir, StateDiffHints as StateDiffHintsNoir, StateReference as StateReferenceNoir, StorageRead as StorageReadNoir, @@ -591,24 +594,57 @@ function mapScopedNullifierFromNoir(nullifier: ScopedNullifierNoir) { } /** - * Maps a SideEffect to a noir side effect. - * @param sideEffect - The SideEffect. + * Maps a LogHash to a noir LogHash. + * @param logHash - The LogHash. * @returns The noir side effect. */ -export function mapSideEffectToNoir(sideEffect: SideEffect): SideEffectNoir { +export function mapLogHashToNoir(logHash: LogHash): LogHashNoir { return { - value: mapFieldToNoir(sideEffect.value), - counter: mapFieldToNoir(sideEffect.counter), + value: mapFieldToNoir(logHash.value), + counter: mapNumberToNoir(logHash.counter), + length: mapFieldToNoir(logHash.length), }; } /** - * Maps a noir side effect to a SideEffect. - * @param sideEffect - The noir SideEffect. + * Maps a noir LogHash to a LogHash. + * @param logHash - The noir LogHash. * @returns The TS side effect. */ -export function mapSideEffectFromNoir(sideEffect: SideEffectNoir): SideEffect { - return new SideEffect(mapFieldFromNoir(sideEffect.value), mapFieldFromNoir(sideEffect.counter)); +export function mapLogHashFromNoir(logHash: LogHashNoir): LogHash { + return new LogHash( + mapFieldFromNoir(logHash.value), + mapNumberFromNoir(logHash.counter), + mapFieldFromNoir(logHash.length), + ); +} + +/** + * Maps a NoteLogHash to a noir NoteLogHash. + * @param noteLogHash - The NoteLogHash. + * @returns The noir NoteLogHash. + */ +export function mapNoteLogHashToNoir(noteLogHash: NoteLogHash): NoteLogHashNoir { + return { + value: mapFieldToNoir(noteLogHash.value), + counter: mapNumberToNoir(noteLogHash.counter), + length: mapFieldToNoir(noteLogHash.length), + note_hash_counter: mapNumberToNoir(noteLogHash.noteHashCounter), + }; +} + +/** + * Maps a noir NoteLogHash to a NoteLogHash. + * @param noteLogHash - The noir NoteLogHash. + * @returns The TS NoteLogHash.
+ */ +export function mapNoteLogHashFromNoir(noteLogHash: NoteLogHashNoir): NoteLogHash { + return new NoteLogHash( + mapFieldFromNoir(noteLogHash.value), + mapNumberFromNoir(noteLogHash.counter), + mapFieldFromNoir(noteLogHash.length), + mapNumberFromNoir(noteLogHash.note_hash_counter), + ); } /** @@ -759,8 +795,9 @@ export function mapPrivateCircuitPublicInputsToNoir( new_l2_to_l1_msgs: mapTuple(privateCircuitPublicInputs.newL2ToL1Msgs, mapL2ToL1MessageToNoir), start_side_effect_counter: mapFieldToNoir(privateCircuitPublicInputs.startSideEffectCounter), end_side_effect_counter: mapFieldToNoir(privateCircuitPublicInputs.endSideEffectCounter), - encrypted_logs_hashes: mapTuple(privateCircuitPublicInputs.encryptedLogsHashes, mapSideEffectToNoir), - unencrypted_logs_hashes: mapTuple(privateCircuitPublicInputs.unencryptedLogsHashes, mapSideEffectToNoir), + note_encrypted_logs_hashes: mapTuple(privateCircuitPublicInputs.noteEncryptedLogsHashes, mapNoteLogHashToNoir), + encrypted_logs_hashes: mapTuple(privateCircuitPublicInputs.encryptedLogsHashes, mapLogHashToNoir), + unencrypted_logs_hashes: mapTuple(privateCircuitPublicInputs.unencryptedLogsHashes, mapLogHashToNoir), encrypted_log_preimages_length: mapFieldToNoir(privateCircuitPublicInputs.encryptedLogPreimagesLength), unencrypted_log_preimages_length: mapFieldToNoir(privateCircuitPublicInputs.unencryptedLogPreimagesLength), historical_header: mapHeaderToNoir(privateCircuitPublicInputs.historicalHeader), @@ -1048,12 +1085,13 @@ export function mapPrivateAccumulatedDataFromNoir( MAX_NEW_L2_TO_L1_MSGS_PER_TX, mapScopedL2ToL1MessageFromNoir, ), - mapTupleFromNoir(privateAccumulatedData.encrypted_logs_hashes, MAX_ENCRYPTED_LOGS_PER_TX, mapSideEffectFromNoir), mapTupleFromNoir( - privateAccumulatedData.unencrypted_logs_hashes, - MAX_UNENCRYPTED_LOGS_PER_TX, - mapSideEffectFromNoir, + privateAccumulatedData.note_encrypted_logs_hashes, + MAX_NOTE_ENCRYPTED_LOGS_PER_TX, + mapNoteLogHashFromNoir, ), + mapTupleFromNoir(privateAccumulatedData.encrypted_logs_hashes, MAX_ENCRYPTED_LOGS_PER_TX, mapLogHashFromNoir), + mapTupleFromNoir(privateAccumulatedData.unencrypted_logs_hashes, MAX_UNENCRYPTED_LOGS_PER_TX, mapLogHashFromNoir), mapFieldFromNoir(privateAccumulatedData.encrypted_log_preimages_length), mapFieldFromNoir(privateAccumulatedData.unencrypted_log_preimages_length), mapTupleFromNoir( @@ -1074,8 +1112,9 @@ export function mapPrivateAccumulatedDataToNoir(data: PrivateAccumulatedData): P new_note_hashes: mapTuple(data.newNoteHashes, mapScopedNoteHashToNoir), new_nullifiers: mapTuple(data.newNullifiers, mapScopedNullifierToNoir), new_l2_to_l1_msgs: mapTuple(data.newL2ToL1Msgs, mapScopedL2ToL1MessageToNoir), - encrypted_logs_hashes: mapTuple(data.encryptedLogsHashes, mapSideEffectToNoir), - unencrypted_logs_hashes: mapTuple(data.unencryptedLogsHashes, mapSideEffectToNoir), + note_encrypted_logs_hashes: mapTuple(data.noteEncryptedLogsHashes, mapNoteLogHashToNoir), + encrypted_logs_hashes: mapTuple(data.encryptedLogsHashes, mapLogHashToNoir), + unencrypted_logs_hashes: mapTuple(data.unencryptedLogsHashes, mapLogHashToNoir), encrypted_log_preimages_length: mapFieldToNoir(data.encryptedLogPreimagesLength), unencrypted_log_preimages_length: mapFieldToNoir(data.unencryptedLogPreimagesLength), private_call_stack: mapTuple(data.privateCallStack, mapCallRequestToNoir), @@ -1090,8 +1129,13 @@ export function mapPublicAccumulatedDataFromNoir( mapTupleFromNoir(publicAccumulatedData.new_note_hashes, MAX_NEW_NOTE_HASHES_PER_TX, mapNoteHashFromNoir), 
mapTupleFromNoir(publicAccumulatedData.new_nullifiers, MAX_NEW_NULLIFIERS_PER_TX, mapNullifierFromNoir), mapTupleFromNoir(publicAccumulatedData.new_l2_to_l1_msgs, MAX_NEW_L2_TO_L1_MSGS_PER_TX, mapFieldFromNoir), - mapTupleFromNoir(publicAccumulatedData.encrypted_logs_hashes, MAX_ENCRYPTED_LOGS_PER_TX, mapSideEffectFromNoir), - mapTupleFromNoir(publicAccumulatedData.unencrypted_logs_hashes, MAX_UNENCRYPTED_LOGS_PER_TX, mapSideEffectFromNoir), + mapTupleFromNoir( + publicAccumulatedData.note_encrypted_logs_hashes, + MAX_NOTE_ENCRYPTED_LOGS_PER_TX, + mapNoteLogHashFromNoir, + ), + mapTupleFromNoir(publicAccumulatedData.encrypted_logs_hashes, MAX_ENCRYPTED_LOGS_PER_TX, mapLogHashFromNoir), + mapTupleFromNoir(publicAccumulatedData.unencrypted_logs_hashes, MAX_UNENCRYPTED_LOGS_PER_TX, mapLogHashFromNoir), mapFieldFromNoir(publicAccumulatedData.encrypted_log_preimages_length), mapFieldFromNoir(publicAccumulatedData.unencrypted_log_preimages_length), mapTupleFromNoir( @@ -1115,8 +1159,9 @@ export function mapPublicAccumulatedDataToNoir( new_note_hashes: mapTuple(publicAccumulatedData.newNoteHashes, mapNoteHashToNoir), new_nullifiers: mapTuple(publicAccumulatedData.newNullifiers, mapNullifierToNoir), new_l2_to_l1_msgs: mapTuple(publicAccumulatedData.newL2ToL1Msgs, mapFieldToNoir), - encrypted_logs_hashes: mapTuple(publicAccumulatedData.encryptedLogsHashes, mapSideEffectToNoir), - unencrypted_logs_hashes: mapTuple(publicAccumulatedData.unencryptedLogsHashes, mapSideEffectToNoir), + note_encrypted_logs_hashes: mapTuple(publicAccumulatedData.noteEncryptedLogsHashes, mapNoteLogHashToNoir), + encrypted_logs_hashes: mapTuple(publicAccumulatedData.encryptedLogsHashes, mapLogHashToNoir), + unencrypted_logs_hashes: mapTuple(publicAccumulatedData.unencryptedLogsHashes, mapLogHashToNoir), encrypted_log_preimages_length: mapFieldToNoir(publicAccumulatedData.encryptedLogPreimagesLength), unencrypted_log_preimages_length: mapFieldToNoir(publicAccumulatedData.unencryptedLogPreimagesLength), public_data_update_requests: mapTuple( @@ -1181,6 +1226,7 @@ export function mapCombinedAccumulatedDataFromNoir( mapTupleFromNoir(combinedAccumulatedData.new_note_hashes, MAX_NEW_NOTE_HASHES_PER_TX, mapFieldFromNoir), mapTupleFromNoir(combinedAccumulatedData.new_nullifiers, MAX_NEW_NULLIFIERS_PER_TX, mapFieldFromNoir), mapTupleFromNoir(combinedAccumulatedData.new_l2_to_l1_msgs, MAX_NEW_L2_TO_L1_MSGS_PER_TX, mapFieldFromNoir), + mapFieldFromNoir(combinedAccumulatedData.note_encrypted_logs_hash), mapFieldFromNoir(combinedAccumulatedData.encrypted_logs_hash), mapFieldFromNoir(combinedAccumulatedData.unencrypted_logs_hash), mapFieldFromNoir(combinedAccumulatedData.encrypted_log_preimages_length), @@ -1201,6 +1247,7 @@ export function mapCombinedAccumulatedDataToNoir( new_note_hashes: mapTuple(combinedAccumulatedData.newNoteHashes, mapFieldToNoir), new_nullifiers: mapTuple(combinedAccumulatedData.newNullifiers, mapFieldToNoir), new_l2_to_l1_msgs: mapTuple(combinedAccumulatedData.newL2ToL1Msgs, mapFieldToNoir), + note_encrypted_logs_hash: mapFieldToNoir(combinedAccumulatedData.noteEncryptedLogsHash), encrypted_logs_hash: mapFieldToNoir(combinedAccumulatedData.encryptedLogsHash), unencrypted_logs_hash: mapFieldToNoir(combinedAccumulatedData.unencryptedLogsHash), encrypted_log_preimages_length: mapFieldToNoir(combinedAccumulatedData.encryptedLogPreimagesLength), @@ -1423,6 +1470,7 @@ function mapPrivateKernelResetOutputsToNoir(inputs: PrivateKernelResetOutputs): return { note_hashes: mapTuple(inputs.noteHashes, 
mapScopedNoteHashToNoir), nullifiers: mapTuple(inputs.nullifiers, mapScopedNullifierToNoir), + note_encrypted_log_hashes: mapTuple(inputs.noteEncryptedLogHashes, mapNoteLogHashToNoir), }; } @@ -1432,9 +1480,11 @@ function mapPrivateKernelTailHintsToNoir(inputs: PrivateKernelTailHints): Privat sorted_new_note_hashes_indexes: mapTuple(inputs.sortedNewNoteHashesIndexes, mapNumberToNoir), sorted_new_nullifiers: mapTuple(inputs.sortedNewNullifiers, mapScopedNullifierToNoir), sorted_new_nullifiers_indexes: mapTuple(inputs.sortedNewNullifiersIndexes, mapNumberToNoir), - sorted_encrypted_log_hashes: mapTuple(inputs.sortedEncryptedLogHashes, mapSideEffectToNoir), + sorted_note_encrypted_log_hashes: mapTuple(inputs.sortedNoteEncryptedLogHashes, mapNoteLogHashToNoir), + sorted_note_encrypted_log_hashes_indexes: mapTuple(inputs.sortedNoteEncryptedLogHashesIndexes, mapNumberToNoir), + sorted_encrypted_log_hashes: mapTuple(inputs.sortedEncryptedLogHashes, mapLogHashToNoir), sorted_encrypted_log_hashes_indexes: mapTuple(inputs.sortedEncryptedLogHashesIndexes, mapNumberToNoir), - sorted_unencrypted_log_hashes: mapTuple(inputs.sortedUnencryptedLogHashes, mapSideEffectToNoir), + sorted_unencrypted_log_hashes: mapTuple(inputs.sortedUnencryptedLogHashes, mapLogHashToNoir), sorted_unencrypted_log_hashes_indexes: mapTuple(inputs.sortedUnencryptedLogHashesIndexes, mapNumberToNoir), }; } @@ -1446,6 +1496,7 @@ function mapPrivateKernelResetHintsToNoir(inputs: PrivateKernelResetHints): Priv mapNumberToNoir, ), transient_note_hash_indexes_for_nullifiers: mapTuple(inputs.transientNoteHashIndexesForNullifiers, mapNumberToNoir), + transient_note_hash_indexes_for_logs: mapTuple(inputs.transientNoteHashIndexesForLogs, mapNumberToNoir), note_hash_read_request_hints: mapNoteHashReadRequestHintsToNoir(inputs.noteHashReadRequestHints), nullifier_read_request_hints: mapNullifierReadRequestHintsToNoir(inputs.nullifierReadRequestHints), master_nullifier_secret_keys: mapTuple(inputs.masterNullifierSecretKeys, mapGrumpkinPrivateKeyToNoir), @@ -1629,7 +1680,7 @@ export function mapPublicCircuitPublicInputsToNoir( new_l2_to_l1_msgs: mapTuple(publicInputs.newL2ToL1Msgs, mapL2ToL1MessageToNoir), start_side_effect_counter: mapFieldToNoir(publicInputs.startSideEffectCounter), end_side_effect_counter: mapFieldToNoir(publicInputs.endSideEffectCounter), - unencrypted_logs_hashes: mapTuple(publicInputs.unencryptedLogsHashes, mapSideEffectToNoir), + unencrypted_logs_hashes: mapTuple(publicInputs.unencryptedLogsHashes, mapLogHashToNoir), unencrypted_log_preimages_length: mapFieldToNoir(publicInputs.unencryptedLogPreimagesLength), historical_header: mapHeaderToNoir(publicInputs.historicalHeader), global_variables: mapGlobalVariablesToNoir(publicInputs.globalVariables), diff --git a/yarn-project/p2p/src/service/tx_messages.ts b/yarn-project/p2p/src/service/tx_messages.ts index c4ec54e5db0..418704d492a 100644 --- a/yarn-project/p2p/src/service/tx_messages.ts +++ b/yarn-project/p2p/src/service/tx_messages.ts @@ -68,6 +68,7 @@ export function toTxMessage(tx: Tx): Buffer { const messageBuffer = Buffer.concat([ createMessageComponent(tx.data), createMessageComponent(tx.proof), + createMessageComponent(tx.noteEncryptedLogs), createMessageComponent(tx.encryptedLogs), createMessageComponent(tx.unencryptedLogs), createMessageComponents(tx.enqueuedPublicFunctionCalls), @@ -114,7 +115,11 @@ export function fromTxMessage(buffer: Buffer): Tx { const publicInputs = toObject(buffer.subarray(4), PrivateKernelTailCircuitPublicInputs); const proof = 
toObject(publicInputs.remainingData, Proof); - const encryptedLogs = toObject(proof.remainingData, EncryptedTxL2Logs); + const noteEncryptedLogs = toObject(proof.remainingData, EncryptedTxL2Logs); + if (!noteEncryptedLogs.obj) { + noteEncryptedLogs.obj = new EncryptedTxL2Logs([]); + } + const encryptedLogs = toObject(noteEncryptedLogs.remainingData, EncryptedTxL2Logs); if (!encryptedLogs.obj) { encryptedLogs.obj = new EncryptedTxL2Logs([]); } @@ -129,6 +134,7 @@ export function fromTxMessage(buffer: Buffer): Tx { return new Tx( publicInputs.obj!, proof.obj!, + noteEncryptedLogs.obj, encryptedLogs.obj, unencryptedLogs.obj, publicCalls.objects, diff --git a/yarn-project/prover-client/src/mocks/fixtures.ts b/yarn-project/prover-client/src/mocks/fixtures.ts index 065c5296edf..74c4e2cb62b 100644 --- a/yarn-project/prover-client/src/mocks/fixtures.ts +++ b/yarn-project/prover-client/src/mocks/fixtures.ts @@ -119,6 +119,7 @@ export const makeBloatedProcessedTx = async (builderDb: MerkleTreeOperations, se processedTx.data.end.newNullifiers[tx.data.forPublic!.end.newNullifiers.length - 1] = Fr.zero(); processedTx.data.end.newL2ToL1Msgs = makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_TX, fr, seed + 0x300); + processedTx.data.end.noteEncryptedLogsHash = Fr.fromBuffer(processedTx.noteEncryptedLogs.hash(0)); processedTx.data.end.encryptedLogsHash = Fr.fromBuffer(processedTx.encryptedLogs.hash()); processedTx.data.end.unencryptedLogsHash = Fr.fromBuffer(processedTx.unencryptedLogs.hash()); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 302f35acd5c..48d144b05fd 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -408,6 +408,18 @@ export class ProvingOrchestrator { logger.debug('Not running base rollup, state invalid'); return; } + if ( + !tx.baseRollupInputs.kernelData.publicInputs.end.noteEncryptedLogsHash + .toBuffer() + .equals(tx.processedTx.noteEncryptedLogs.hash(0)) + ) { + provingState.reject( + `Note encrypted logs hash mismatch: ${ + tx.baseRollupInputs.kernelData.publicInputs.end.noteEncryptedLogsHash + } === ${Fr.fromBuffer(tx.processedTx.noteEncryptedLogs.hash(0))}`, + ); + return; + } if ( !tx.baseRollupInputs.kernelData.publicInputs.end.encryptedLogsHash .toBuffer() diff --git a/yarn-project/prover-client/src/stats.ts b/yarn-project/prover-client/src/stats.ts deleted file mode 100644 index fbd68bde90b..00000000000 --- a/yarn-project/prover-client/src/stats.ts +++ /dev/null @@ -1,82 +0,0 @@ -import { type PublicKernelRequest, PublicKernelType } from '@aztec/circuit-types'; -import type { CircuitName, CircuitProvingStats, CircuitWitnessGenerationStats } from '@aztec/circuit-types/stats'; -import { type Logger } from '@aztec/foundation/log'; -import { type ServerProtocolArtifact } from '@aztec/noir-protocol-circuits-types'; - -export function emitCircuitWitnessGenerationStats( - circuitName: CircuitName, - duration: number, - inputSize: number, - outputSize: number, - logger: Logger, -) { - const stats: CircuitWitnessGenerationStats = { - eventName: 'circuit-witness-generation', - circuitName, - inputSize, - outputSize, - duration, - }; - - logger.debug('Circuit witness generation stats', stats); -} - -export function emitCircuitProvingStats( - circuitName: CircuitName, - duration: number, - inputSize: number, - outputSize: number, - proofSize: number, - logger: Logger, -) { - const stats: CircuitProvingStats = { - 
eventName: 'circuit-proving', - circuitName, - duration, - inputSize, - outputSize, - proofSize, - }; - - logger.debug('Circuit proving stats', stats); -} - -export function mapPublicKernelToCircuitName(kernelType: PublicKernelRequest['type']): CircuitName { - switch (kernelType) { - case PublicKernelType.SETUP: - return 'public-kernel-setup'; - case PublicKernelType.APP_LOGIC: - return 'public-kernel-app-logic'; - case PublicKernelType.TEARDOWN: - return 'public-kernel-teardown'; - case PublicKernelType.TAIL: - return 'public-kernel-tail'; - default: - throw new Error(`Unknown kernel type: ${kernelType}`); - } -} - -export function circuitTypeToCircuitName(circuitType: ServerProtocolArtifact): CircuitName { - switch (circuitType) { - case 'BaseParityArtifact': - return 'base-parity'; - case 'RootParityArtifact': - return 'root-parity'; - case 'BaseRollupArtifact': - return 'base-rollup'; - case 'MergeRollupArtifact': - return 'merge-rollup'; - case 'RootRollupArtifact': - return 'root-rollup'; - case 'PublicKernelSetupArtifact': - return 'public-kernel-setup'; - case 'PublicKernelAppLogicArtifact': - return 'public-kernel-app-logic'; - case 'PublicKernelTeardownArtifact': - return 'public-kernel-teardown'; - case 'PublicKernelTailArtifact': - return 'public-kernel-tail'; - default: - throw new Error(`Unknown circuit type: ${circuitType}`); - } -} diff --git a/yarn-project/pxe/src/database/kv_pxe_database.ts b/yarn-project/pxe/src/database/kv_pxe_database.ts index 2de3eaff5b2..f17bc651823 100644 --- a/yarn-project/pxe/src/database/kv_pxe_database.ts +++ b/yarn-project/pxe/src/database/kv_pxe_database.ts @@ -2,7 +2,6 @@ import { MerkleTreeId, type NoteFilter, NoteStatus, type PublicKey } from '@azte import { AztecAddress, CompleteAddress, Header } from '@aztec/circuits.js'; import { type ContractArtifact } from '@aztec/foundation/abi'; import { toBufferBE } from '@aztec/foundation/bigint-buffer'; -import { poseidon2Hash } from '@aztec/foundation/crypto'; import { Fr, type Point } from '@aztec/foundation/fields'; import { type AztecArray, @@ -386,27 +385,12 @@ export class KVPxeDatabase implements PxeDatabase { return value ? CompleteAddress.fromBuffer(value) : undefined; } - getCompleteAddress(accountOrNpkMHash: AztecAddress | Fr): Promise { - return Promise.resolve( - this.#getCompleteAddress(accountOrNpkMHash) ?? 
this.#getCompleteAddressWithNpkMHash(accountOrNpkMHash), - ); - } - - #getCompleteAddressWithNpkMHash(npkMHash: Fr): Promise { - const completeAddresses = this.#getCompleteAddresses(); - - const completeAddress = completeAddresses.find(completeAddress => - poseidon2Hash(completeAddress.publicKeys.masterNullifierPublicKey.toFields()).equals(npkMHash), - ); - return Promise.resolve(completeAddress); - } - - #getCompleteAddresses(): CompleteAddress[] { - return Array.from(this.#addresses).map(v => CompleteAddress.fromBuffer(v)); + getCompleteAddress(account: AztecAddress): Promise { + return Promise.resolve(this.#getCompleteAddress(account)); } getCompleteAddresses(): Promise { - return Promise.resolve(this.#getCompleteAddresses()); + return Promise.resolve(Array.from(this.#addresses).map(v => CompleteAddress.fromBuffer(v))); } getSynchedBlockNumberForPublicKey(publicKey: Point): number | undefined { diff --git a/yarn-project/pxe/src/database/pxe_database.ts b/yarn-project/pxe/src/database/pxe_database.ts index 13c930b2b92..e37d8fc7069 100644 --- a/yarn-project/pxe/src/database/pxe_database.ts +++ b/yarn-project/pxe/src/database/pxe_database.ts @@ -133,14 +133,14 @@ export interface PxeDatabase extends ContractArtifactDatabase, ContractInstanceD addCompleteAddress(address: CompleteAddress): Promise; /** - * Retrieve the complete address associated to a given address or master nullifier public key hash. - * @param accountOrNpkMHash - account address or master nullifier public key hash. + * Retrieve the complete address associated to a given address. + * @param account - The account address. * @returns A promise that resolves to a CompleteAddress instance if found, or undefined if not found. */ - getCompleteAddress(accountOrNpkMHash: AztecAddress | Fr): Promise; + getCompleteAddress(account: AztecAddress): Promise; /** - * Retrieves the list of complete address added to this database + * Retrieves the list of complete addresses added to this database * @returns A promise that resolves to an array of AztecAddress instances. */ getCompleteAddresses(): Promise; diff --git a/yarn-project/pxe/src/kernel_oracle/index.ts b/yarn-project/pxe/src/kernel_oracle/index.ts index 54b848c5531..26728bd6be2 100644 --- a/yarn-project/pxe/src/kernel_oracle/index.ts +++ b/yarn-project/pxe/src/kernel_oracle/index.ts @@ -9,6 +9,7 @@ import { computeContractClassIdPreimage, computeSaltedInitializationHash, } from '@aztec/circuits.js'; +import { createDebugLogger } from '@aztec/foundation/log'; import { type Tuple } from '@aztec/foundation/serialize'; import { type ContractDataOracle } from '../contract_data_oracle/index.js'; @@ -20,7 +21,12 @@ import { type ProvingDataOracle } from './../kernel_prover/proving_data_oracle.j * A data oracle that provides information needed for simulating a transaction. 
*/ export class KernelOracle implements ProvingDataOracle { - constructor(private contractDataOracle: ContractDataOracle, private keyStore: KeyStore, private node: AztecNode) {} + constructor( + private contractDataOracle: ContractDataOracle, + private keyStore: KeyStore, + private node: AztecNode, + private log = createDebugLogger('aztec:pxe:kernel_oracle'), + ) {} public async getContractAddressPreimage(address: AztecAddress) { const instance = await this.contractDataOracle.getContractInstance(address); @@ -64,4 +70,20 @@ export class KernelOracle implements ProvingDataOracle { public getMasterNullifierSecretKey(nullifierPublicKey: Point) { return this.keyStore.getMasterNullifierSecretKeyForPublicKey(nullifierPublicKey); } + + public async getFunctionName(contractAddress: AztecAddress, selector: FunctionSelector): Promise { + try { + const contractInstance = await this.contractDataOracle.getContractInstance(contractAddress); + + const [contractArtifact, functionArtifact] = await Promise.all([ + this.contractDataOracle.getContractArtifact(contractInstance.contractClassId), + this.contractDataOracle.getFunctionArtifact(contractAddress, selector), + ]); + + return `${contractArtifact.name}:${functionArtifact.name}`; + } catch (e) { + this.log.error(`Failed to get function name for contract ${contractAddress} and selector ${selector}: ${e}`); + return 'Unknown'; + } + } } diff --git a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts b/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts index 99983a3bfb3..efbfafffe61 100644 --- a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts +++ b/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts @@ -77,6 +77,7 @@ describe('Kernel Prover', () => { partialWitness: new Map(), enqueuedPublicFunctionCalls: [], publicTeardownFunctionCall: PublicCallRequest.empty(), + noteEncryptedLogs: [], encryptedLogs: [], unencryptedLogs: [], }; diff --git a/yarn-project/pxe/src/kernel_prover/kernel_prover.ts b/yarn-project/pxe/src/kernel_prover/kernel_prover.ts index 5d3da7712c4..b9c74e48f36 100644 --- a/yarn-project/pxe/src/kernel_prover/kernel_prover.ts +++ b/yarn-project/pxe/src/kernel_prover/kernel_prover.ts @@ -84,9 +84,15 @@ export class KernelProver { ? 
CallRequest.empty() : currentExecution.publicTeardownFunctionCall.toCallRequest(); + const functionName = await this.oracle.getFunctionName( + currentExecution.callStackItem.contractAddress, + currentExecution.callStackItem.functionData.selector, + ); + const proofOutput = await this.proofCreator.createAppCircuitProof( currentExecution.partialWitness, currentExecution.acir, + functionName, ); const privateCallData = await this.createPrivateCallData( @@ -140,6 +146,7 @@ export class KernelProver { const expectedOutputs = buildPrivateKernelResetOutputs( output.publicInputs.end.newNoteHashes, output.publicInputs.end.newNullifiers, + output.publicInputs.end.noteEncryptedLogsHashes, ); output = await this.proofCreator.createProofReset( diff --git a/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_reset_hints.ts b/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_reset_hints.ts index a38f81f292b..1449ceec2ac 100644 --- a/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_reset_hints.ts +++ b/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_reset_hints.ts @@ -3,6 +3,7 @@ import { GrumpkinScalar, MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, + MAX_NOTE_ENCRYPTED_LOGS_PER_TX, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, type MAX_NULLIFIER_READ_REQUESTS_PER_TX, MembershipWitness, @@ -87,16 +88,23 @@ export async function buildPrivateKernelResetHints( oracle, ); - const [transientNullifierIndexesForNoteHashes, transientNoteHashIndexesForNullifiers] = buildTransientDataHints( + const [ + transientNullifierIndexesForNoteHashes, + transientNoteHashIndexesForNullifiers, + transientNoteHashIndexesForLogs, + ] = buildTransientDataHints( publicInputs.end.newNoteHashes, publicInputs.end.newNullifiers, + publicInputs.end.noteEncryptedLogsHashes, MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, + MAX_NOTE_ENCRYPTED_LOGS_PER_TX, ); return new PrivateKernelResetHints( transientNullifierIndexesForNoteHashes, transientNoteHashIndexesForNullifiers, + transientNoteHashIndexesForLogs, noteHashReadRequestHints, nullifierReadRequestHints, masterNullifierSecretKeys, diff --git a/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_reset_outputs.ts b/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_reset_outputs.ts index e2b80f6f0f2..7278cf72e80 100644 --- a/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_reset_outputs.ts +++ b/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_reset_outputs.ts @@ -1,6 +1,8 @@ import { MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, + MAX_NOTE_ENCRYPTED_LOGS_PER_TX, + NoteLogHash, PrivateKernelResetOutputs, ScopedNoteHash, ScopedNullifier, @@ -11,6 +13,7 @@ import { type Tuple } from '@aztec/foundation/serialize'; export function buildPrivateKernelResetOutputs( prevNoteHashes: Tuple, prevNullifiers: Tuple, + prevLogs: Tuple, ) { // Propagate note hashes that are not linked to a nullifier. // Note that note hashes can't link to the first nullifier (counter == 0). 
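The hunk below also starts pruning note logs together with their notes: a note log survives the reset only if the note hash it points at (matched by counter) was not nullified in the same transaction. A minimal sketch of that rule, using simplified stand-in shapes rather than the circuits.js types:

```typescript
// Simplified stand-ins: nullifierCounter === 0 means "not nullified".
interface NoteHashLike { counter: number; nullifierCounter: number; }
interface NoteLogLike { noteHashCounter: number; }

function pruneLogsOfNullifiedNotes(noteHashes: NoteHashLike[], logs: NoteLogLike[]): NoteLogLike[] {
  // Collect the counters of all note hashes that got nullified in this tx...
  const nullified = new Set(noteHashes.filter(n => n.nullifierCounter !== 0).map(n => n.counter));
  // ...and drop every log attached to one of them.
  return logs.filter(l => !nullified.has(l.noteHashCounter));
}

// The log attached to note counter 7 disappears once note 7 is nullified:
console.log(pruneLogsOfNullifiedNotes(
  [{ counter: 7, nullifierCounter: 12 }, { counter: 9, nullifierCounter: 0 }],
  [{ noteHashCounter: 7 }, { noteHashCounter: 9 }],
)); // -> [ { noteHashCounter: 9 } ]
```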
@@ -26,5 +29,13 @@ export function buildPrivateKernelResetOutputs( MAX_NEW_NULLIFIERS_PER_TX, ); - return new PrivateKernelResetOutputs(noteHashes, nullifiers); + const nullifiedNotes = prevNoteHashes.filter(n => !n.isEmpty() && n.nullifierCounter).map(n => n.counter); + + const logs = padArrayEnd( + prevLogs.filter(l => !l.isEmpty() && !nullifiedNotes.includes(l.noteHashCounter)), + NoteLogHash.empty(), + MAX_NOTE_ENCRYPTED_LOGS_PER_TX, + ); + + return new PrivateKernelResetOutputs(noteHashes, nullifiers, logs); } diff --git a/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_hints.ts b/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_hints.ts index 8b30244e83b..4a0781fdb8a 100644 --- a/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_hints.ts +++ b/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_hints.ts @@ -1,37 +1,13 @@ import { - type MAX_ENCRYPTED_LOGS_PER_TX, + MAX_ENCRYPTED_LOGS_PER_TX, MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, - type MAX_UNENCRYPTED_LOGS_PER_TX, + MAX_NOTE_ENCRYPTED_LOGS_PER_TX, + MAX_UNENCRYPTED_LOGS_PER_TX, type PrivateKernelCircuitPublicInputs, PrivateKernelTailHints, - type SideEffect, - type SideEffectType, sortByCounterGetSortedHints, } from '@aztec/circuits.js'; -import { type Tuple } from '@aztec/foundation/serialize'; - -/** @deprecated Use sortByCounterGetSortedHints instead */ -function sortSideEffects( - sideEffects: Tuple, -): [Tuple, Tuple] { - const sorted = sideEffects - .map((sideEffect, index) => ({ sideEffect, index })) - .sort((a, b) => { - // Empty ones go to the right - if (a.sideEffect.isEmpty()) { - return 1; - } - return Number(a.sideEffect.counter.toBigInt() - b.sideEffect.counter.toBigInt()); - }); - - const originalToSorted = sorted.map(() => 0); - sorted.forEach(({ index }, i) => { - originalToSorted[index] = i; - }); - - return [sorted.map(({ sideEffect }) => sideEffect) as Tuple, originalToSorted as Tuple]; -} export function buildPrivateKernelTailHints(publicInputs: PrivateKernelCircuitPublicInputs) { const [sortedNoteHashes, sortedNoteHashesIndexes] = sortByCounterGetSortedHints( @@ -44,21 +20,28 @@ export function buildPrivateKernelTailHints(publicInputs: PrivateKernelCircuitPu MAX_NEW_NULLIFIERS_PER_TX, ); - const [sortedEncryptedLogHashes, sortedEncryptedLogHashesIndexes] = sortSideEffects< - SideEffect, - typeof MAX_ENCRYPTED_LOGS_PER_TX - >(publicInputs.end.encryptedLogsHashes); + const [sortedNoteEncryptedLogHashes, sortedNoteEncryptedLogHashesIndexes] = sortByCounterGetSortedHints( + publicInputs.end.noteEncryptedLogsHashes, + MAX_NOTE_ENCRYPTED_LOGS_PER_TX, + ); + + const [sortedEncryptedLogHashes, sortedEncryptedLogHashesIndexes] = sortByCounterGetSortedHints( + publicInputs.end.encryptedLogsHashes, + MAX_ENCRYPTED_LOGS_PER_TX, + ); - const [sortedUnencryptedLogHashes, sortedUnencryptedLogHashesIndexes] = sortSideEffects< - SideEffect, - typeof MAX_UNENCRYPTED_LOGS_PER_TX - >(publicInputs.end.unencryptedLogsHashes); + const [sortedUnencryptedLogHashes, sortedUnencryptedLogHashesIndexes] = sortByCounterGetSortedHints( + publicInputs.end.unencryptedLogsHashes, + MAX_UNENCRYPTED_LOGS_PER_TX, + ); return new PrivateKernelTailHints( sortedNoteHashes, sortedNoteHashesIndexes, sortedNullifiers, sortedNullifiersIndexes, + sortedNoteEncryptedLogHashes, + sortedNoteEncryptedLogHashesIndexes, sortedEncryptedLogHashes, sortedEncryptedLogHashesIndexes, sortedUnencryptedLogHashes, 
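The tail-hints builder above retires the deprecated sortSideEffects helper in favor of the shared sortByCounterGetSortedHints. A sketch of the pattern that helper family implements (its real signature and tuple types differ): order items by counter with empty entries pushed to the back, and return the original-to-sorted index map the kernel consumes as a hint.

```typescript
interface Counted { counter: number; isEmpty(): boolean; }

function sortWithHints<T extends Counted>(items: T[]): [T[], number[]] {
  const tagged = items.map((item, index) => ({ item, index }));
  tagged.sort((a, b) => {
    // Empty items sort after non-empty ones; order among empties is irrelevant.
    if (a.item.isEmpty()) return b.item.isEmpty() ? 0 : 1;
    if (b.item.isEmpty()) return -1;
    return a.item.counter - b.item.counter;
  });
  // hints[originalIndex] = position of that item in the sorted output.
  const hints = new Array<number>(items.length).fill(0);
  tagged.forEach(({ index }, sortedPosition) => (hints[index] = sortedPosition));
  return [tagged.map(({ item }) => item), hints];
}
```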
diff --git a/yarn-project/pxe/src/kernel_prover/proving_data_oracle.ts b/yarn-project/pxe/src/kernel_prover/proving_data_oracle.ts index 04af3cad3ed..f1dc0b39dda 100644 --- a/yarn-project/pxe/src/kernel_prover/proving_data_oracle.ts +++ b/yarn-project/pxe/src/kernel_prover/proving_data_oracle.ts @@ -76,4 +76,6 @@ export interface ProvingDataOracle { * @returns the master nullifier secret key. */ getMasterNullifierSecretKey(nullifierPublicKey: Point): Promise; + + getFunctionName(contractAddress: AztecAddress, selector: FunctionSelector): Promise; } diff --git a/yarn-project/pxe/src/kernel_prover/test/test_circuit_prover.ts b/yarn-project/pxe/src/kernel_prover/test/test_circuit_prover.ts index ce5ce55a1cf..ff919257d4b 100644 --- a/yarn-project/pxe/src/kernel_prover/test/test_circuit_prover.ts +++ b/yarn-project/pxe/src/kernel_prover/test/test_circuit_prover.ts @@ -89,7 +89,7 @@ export class TestProofCreator implements ProofCreator { ); this.log.debug(`Simulated private kernel ordering`, { eventName: 'circuit-simulation', - circuitName: 'private-kernel-ordering', + circuitName: 'private-kernel-tail', duration, inputSize: privateInputs.toBuffer().length, outputSize: result.toBuffer().length, diff --git a/yarn-project/pxe/src/note_processor/produce_note_dao.ts b/yarn-project/pxe/src/note_processor/produce_note_dao.ts index f22d17f63eb..d5de1d4dea9 100644 --- a/yarn-project/pxe/src/note_processor/produce_note_dao.ts +++ b/yarn-project/pxe/src/note_processor/produce_note_dao.ts @@ -107,25 +107,9 @@ async function findNoteIndexAndNullifier( } if (!nonce) { - let errorString; - if (siloedNoteHash == undefined) { - errorString = 'Cannot find a matching commitment for the note.'; - } else { - errorString = `We decrypted a log, but couldn't find a corresponding note in the tree. -This might be because the note was nullified in the same tx which created it. -In that case, everything is fine. To check whether this is the case, look back through -the logs for a notification -'important: chopped commitment for siloed inner hash note -${siloedNoteHash.toString()}'. -If you can see that notification. Everything's fine. -If that's not the case, and you can't find such a notification, something has gone wrong. -There could be a problem with the way you've defined a custom note, or with the way you're -serializing / deserializing / hashing / encrypting / decrypting that note. -Please see the following github issue to track an improvement that we're working on: -https://github.com/AztecProtocol/aztec-packages/issues/1641`; - } - - throw new Error(errorString); + // NB: this used to warn the user that a decrypted log didn't match any notes. + // This was previously fine as we didn't chop transient note logs, but now we do (#1641 complete). 
+ throw new Error('Cannot find a matching commitment for the note.'); } return { diff --git a/yarn-project/pxe/src/pxe_service/create_pxe_service.ts b/yarn-project/pxe/src/pxe_service/create_pxe_service.ts index 47e0bf633d1..2a33dd3dd66 100644 --- a/yarn-project/pxe/src/pxe_service/create_pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/create_pxe_service.ts @@ -1,6 +1,7 @@ import { BBNativeProofCreator } from '@aztec/bb-prover'; import { type AztecNode, type ProofCreator } from '@aztec/circuit-types'; import { randomBytes } from '@aztec/foundation/crypto'; +import { createDebugLogger } from '@aztec/foundation/log'; import { TestKeyStore } from '@aztec/key-store'; import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; import { initStoreForRollup } from '@aztec/kv-store/utils'; @@ -54,7 +55,11 @@ export async function createPXEService( } prover = !config.proverEnabled ? new TestProofCreator() - : new BBNativeProofCreator(config.bbBinaryPath!, config.bbWorkingDirectory!); + : new BBNativeProofCreator( + config.bbBinaryPath!, + config.bbWorkingDirectory!, + createDebugLogger('aztec:pxe:bb-native-prover' + (logSuffix ? `:${logSuffix}` : '')), + ); } const server = new PXEService(keyStore, aztecNode, db, prover, config, logSuffix); diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index cababdd6772..91c00795841 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -38,7 +38,7 @@ import { import { computeNoteHashNonce, siloNullifier } from '@aztec/circuits.js/hash'; import { type ContractArtifact, type DecodedReturn, FunctionSelector, encodeArguments } from '@aztec/foundation/abi'; import { arrayNonEmptyLength, padArrayEnd } from '@aztec/foundation/collection'; -import { Fr } from '@aztec/foundation/fields'; +import { Fq, Fr } from '@aztec/foundation/fields'; import { SerialQueue } from '@aztec/foundation/fifo'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { Timer } from '@aztec/foundation/timer'; @@ -48,6 +48,7 @@ import { collectEnqueuedPublicFunctionCalls, collectPublicTeardownFunctionCall, collectSortedEncryptedLogs, + collectSortedNoteEncryptedLogs, collectSortedUnencryptedLogs, resolveOpcodeLocations, } from '@aztec/simulator'; @@ -191,6 +192,10 @@ export class PXEService implements PXE { return accountCompleteAddress; } + public async rotateMasterNullifierKey(account: AztecAddress, secretKey: Fq = Fq.random()): Promise { + await this.keyStore.rotateMasterNullifierKey(account, secretKey); + } + public async getRegisteredAccounts(): Promise { // Get complete addresses of both the recipients and the accounts const completeAddresses = await this.db.getCompleteAddresses(); @@ -657,6 +662,7 @@ export class PXEService implements PXE { this.log.debug(`Executing kernel prover...`); const { proof, publicInputs } = await kernelProver.prove(txExecutionRequest.toTxRequest(), executionResult); + const noteEncryptedLogs = new EncryptedTxL2Logs([collectSortedNoteEncryptedLogs(executionResult)]); const unencryptedLogs = new UnencryptedTxL2Logs([collectSortedUnencryptedLogs(executionResult)]); const encryptedLogs = new EncryptedTxL2Logs([collectSortedEncryptedLogs(executionResult)]); const enqueuedPublicFunctions = collectEnqueuedPublicFunctionCalls(executionResult); @@ -669,6 +675,7 @@ export class PXEService implements PXE { const tx = new Tx( publicInputs, proof.binaryProof, + noteEncryptedLogs, encryptedLogs, unencryptedLogs, 
enqueuedPublicFunctions, diff --git a/yarn-project/pxe/src/simulator_oracle/index.ts b/yarn-project/pxe/src/simulator_oracle/index.ts index e598a1c49e9..0aafc050919 100644 --- a/yarn-project/pxe/src/simulator_oracle/index.ts +++ b/yarn-project/pxe/src/simulator_oracle/index.ts @@ -37,17 +37,17 @@ export class SimulatorOracle implements DBOracle { private log = createDebugLogger('aztec:pxe:simulator_oracle'), ) {} - async getNullifierKeys(accountOrNpkMHash: AztecAddress | Fr, contractAddress: AztecAddress): Promise { - const masterNullifierPublicKey = await this.keyStore.getMasterNullifierPublicKey(accountOrNpkMHash); - const appNullifierSecretKey = await this.keyStore.getAppNullifierSecretKey(accountOrNpkMHash, contractAddress); + async getNullifierKeys(npkMHash: Fr, contractAddress: AztecAddress): Promise { + const masterNullifierPublicKey = await this.keyStore.getMasterNullifierPublicKey(npkMHash); + const appNullifierSecretKey = await this.keyStore.getAppNullifierSecretKey(npkMHash, contractAddress); return { masterNullifierPublicKey, appNullifierSecretKey }; } - async getCompleteAddress(accountOrNpkMHash: AztecAddress | Fr): Promise { - const completeAddress = await this.db.getCompleteAddress(accountOrNpkMHash); + async getCompleteAddress(account: AztecAddress): Promise { + const completeAddress = await this.db.getCompleteAddress(account); if (!completeAddress) { throw new Error( - `No public key registered for address or master nullifier public key hash ${accountOrNpkMHash}. + `No public key registered for address ${account}. Register it by calling pxe.registerRecipient(...) or pxe.registerAccount(...).\nSee docs for context: https://docs.aztec.network/developers/debugging/aztecnr-errors#simulation-error-No-public-key-registered-for-address-0x0-Register-it-by-calling-pxeregisterRecipient-or-pxeregisterAccount`, ); } diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.ts b/yarn-project/pxe/src/synchronizer/synchronizer.ts index 527fe3d9a47..f85a7cf3a7d 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.ts @@ -105,18 +105,18 @@ export class Synchronizer { return false; } - const encryptedLogs = blocks.flatMap(block => block.body.encryptedLogs); + const noteEncryptedLogs = blocks.flatMap(block => block.body.noteEncryptedLogs); // Update latest tree roots from the most recent block const latestBlock = blocks[blocks.length - 1]; await this.setHeaderFromBlock(latestBlock); - const logCount = L2BlockL2Logs.getTotalLogCount(encryptedLogs); + const logCount = L2BlockL2Logs.getTotalLogCount(noteEncryptedLogs); this.log.debug( `Forwarding ${logCount} encrypted logs and blocks to ${this.noteProcessors.length} note processors`, ); for (const noteProcessor of this.noteProcessors) { - await noteProcessor.process(blocks, encryptedLogs); + await noteProcessor.process(blocks, noteEncryptedLogs); } return true; } catch (err) { @@ -182,9 +182,9 @@ export class Synchronizer { throw new Error('No blocks in processor catch up mode'); } - const encryptedLogs = blocks.flatMap(block => block.body.encryptedLogs); + const noteEncryptedLogs = blocks.flatMap(block => block.body.noteEncryptedLogs); - const logCount = L2BlockL2Logs.getTotalLogCount(encryptedLogs); + const logCount = L2BlockL2Logs.getTotalLogCount(noteEncryptedLogs); this.log.debug(`Forwarding ${logCount} encrypted logs and blocks to note processors in catch up mode`); for (const noteProcessor of catchUpGroup) { @@ -202,7 +202,7 @@ export class Synchronizer { blocks.length - 
index } blocks`, ); - await noteProcessor.process(blocks.slice(index), encryptedLogs.slice(index)); + await noteProcessor.process(blocks.slice(index), noteEncryptedLogs.slice(index)); if (noteProcessor.status.syncedToBlock === toBlockNumber) { // Note processor caught up, move it to `noteProcessors` from `noteProcessorsToCatchUp`. diff --git a/yarn-project/scripts/src/benchmarks/aggregate.ts b/yarn-project/scripts/src/benchmarks/aggregate.ts index f0bf75811f9..4d2a35f559f 100644 --- a/yarn-project/scripts/src/benchmarks/aggregate.ts +++ b/yarn-project/scripts/src/benchmarks/aggregate.ts @@ -97,13 +97,20 @@ function processRollupBlockSynced(entry: L2BlockHandledStats, results: Benchmark * Buckets are circuit names */ function processCircuitSimulation(entry: CircuitSimulationStats, results: BenchmarkCollectedResults) { - const bucket = entry.circuitName; - if (!bucket) { - return; + if (entry.circuitName === 'app-circuit') { + const bucket = entry.appCircuitName; + if (!bucket) { + return; + } + append(results, 'app_circuit_simulation_time_in_ms', bucket, entry.duration); + append(results, 'app_circuit_input_size_in_bytes', bucket, entry.inputSize); + append(results, 'app_circuit_output_size_in_bytes', bucket, entry.outputSize); + } else { + const bucket = entry.circuitName; + append(results, 'protocol_circuit_simulation_time_in_ms', bucket, entry.duration); + append(results, 'protocol_circuit_input_size_in_bytes', bucket, entry.inputSize); + append(results, 'protocol_circuit_output_size_in_bytes', bucket, entry.outputSize); } - append(results, 'circuit_simulation_time_in_ms', bucket, entry.duration); - append(results, 'circuit_input_size_in_bytes', bucket, entry.inputSize); - append(results, 'circuit_output_size_in_bytes', bucket, entry.outputSize); } /** @@ -111,12 +118,22 @@ function processCircuitSimulation(entry: CircuitSimulationStats, results: Benchm * Buckets are circuit names */ function processCircuitProving(entry: CircuitProvingStats, results: BenchmarkCollectedResults) { - const bucket = entry.circuitName; - if (!bucket) { - return; + if (entry.circuitName === 'app-circuit') { + const bucket = entry.appCircuitName; + if (!bucket) { + return; + } + append(results, 'app_circuit_proving_time_in_ms', bucket, entry.duration); + append(results, 'app_circuit_proof_size_in_bytes', bucket, entry.proofSize); + append(results, 'app_circuit_size_in_gates', bucket, entry.circuitSize); + append(results, 'app_circuit_num_public_inputs', bucket, entry.numPublicInputs); + } else { + const bucket = entry.circuitName; + append(results, 'protocol_circuit_proving_time_in_ms', bucket, entry.duration); + append(results, 'protocol_circuit_proof_size_in_bytes', bucket, entry.proofSize); + append(results, 'protocol_circuit_size_in_gates', bucket, entry.circuitSize); + append(results, 'protocol_circuit_num_public_inputs', bucket, entry.numPublicInputs); } - append(results, 'circuit_proving_time_in_ms', bucket, entry.duration); - append(results, 'circuit_proof_size_in_bytes', bucket, entry.proofSize); } /** @@ -124,11 +141,16 @@ function processCircuitProving(entry: CircuitProvingStats, results: BenchmarkCol * Buckets are circuit names */ function processCircuitWitnessGeneration(entry: CircuitWitnessGenerationStats, results: BenchmarkCollectedResults) { - const bucket = entry.circuitName; - if (!bucket) { - return; + if (entry.circuitName === 'app-circuit') { + const bucket = entry.appCircuitName; + if (!bucket) { + return; + } + append(results, 'app_circuit_witness_generation_time_in_ms', bucket, 
entry.duration); + } else { + const bucket = entry.circuitName; + append(results, 'protocol_circuit_witness_generation_time_in_ms', bucket, entry.duration); } - append(results, 'circuit_witness_generation_time_in_ms', bucket, entry.duration); } /** * Processes an entry with event name 'note-processor-caught-up' and updates results diff --git a/yarn-project/scripts/src/benchmarks/markdown.ts b/yarn-project/scripts/src/benchmarks/markdown.ts index 772238679a9..5730067f7c5 100644 --- a/yarn-project/scripts/src/benchmarks/markdown.ts +++ b/yarn-project/scripts/src/benchmarks/markdown.ts @@ -185,7 +185,8 @@ export function getMarkdown(prNumber: number) { const metricsByBlockSize = Metrics.filter(m => m.groupBy === 'block-size').map(m => m.name); const metricsByChainLength = Metrics.filter(m => m.groupBy === 'chain-length').map(m => m.name); - const metricsByCircuitName = Metrics.filter(m => m.groupBy === 'circuit-name').map(m => m.name); + const kernelCircuitMetrics = Metrics.filter(m => m.groupBy === 'protocol-circuit-name').map(m => m.name); + const appCircuitMetrics = Metrics.filter(m => m.groupBy === 'app-circuit-name').map(m => m.name); const metricsByClassesRegistered = Metrics.filter(m => m.groupBy === 'classes-registered').map(m => m.name); const metricsByFeePaymentMethod = Metrics.filter(m => m.groupBy === 'fee-payment-method').map(m => m.name); const metricsByLeafCount = Metrics.filter(m => m.groupBy === 'leaf-count').map(m => m.name); @@ -229,8 +230,11 @@ ${getTableContent(pick(benchmark, metricsByChainLength), baseBenchmark, 'blocks' ### Circuits stats -Stats on running time and I/O sizes collected for every circuit run across all benchmarks. -${getTableContent(transpose(pick(benchmark, metricsByCircuitName)), transpose(baseBenchmark), '', 'Circuit')} +Stats on running time and I/O sizes collected for every kernel circuit run across all benchmarks. 
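The aggregator changes in this file all follow one branching rule: protocol circuits keep their circuit name as the metrics bucket, while 'app-circuit' entries are bucketed by the user function they simulate or prove. A condensed sketch of that rule, with simplified stand-in types:

```typescript
// Simplified stand-ins for the benchmark result shapes.
type Results = Record<string, Record<string, number[]>>;
interface SimStats { circuitName: string; appCircuitName?: string; duration: number; }

function append(results: Results, metric: string, bucket: string, value: number): void {
  ((results[metric] ??= {})[bucket] ??= []).push(value);
}

function processSimulation(entry: SimStats, results: Results): void {
  if (entry.circuitName === 'app-circuit') {
    // App circuits: one bucket per contract function, e.g. 'Token:transfer'.
    if (!entry.appCircuitName) return;
    append(results, 'app_circuit_simulation_time_in_ms', entry.appCircuitName, entry.duration);
  } else {
    append(results, 'protocol_circuit_simulation_time_in_ms', entry.circuitName, entry.duration);
  }
}
```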
+${getTableContent(transpose(pick(benchmark, kernelCircuitMetrics)), transpose(baseBenchmark), '', 'Circuit')} + +Stats on running time collected for app circuits +${getTableContent(transpose(pick(benchmark, appCircuitMetrics)), transpose(baseBenchmark), '', 'Function')} ### Tree insertion stats diff --git a/yarn-project/simulator/src/acvm/oracle/oracle.ts b/yarn-project/simulator/src/acvm/oracle/oracle.ts index 400203a085b..9def84c9f12 100644 --- a/yarn-project/simulator/src/acvm/oracle/oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/oracle.ts @@ -40,19 +40,7 @@ export class Oracle { return unpacked.map(toACVMField); } - async getNullifierKeys([accountAddress]: ACVMField[]): Promise { - const { masterNullifierPublicKey, appNullifierSecretKey } = await this.typedOracle.getNullifierKeys( - fromACVMField(accountAddress), - ); - return [ - toACVMField(masterNullifierPublicKey.x), - toACVMField(masterNullifierPublicKey.y), - toACVMField(appNullifierSecretKey), - ]; - } - - // Keeping this oracle separate from above because I don't want an implicit overload in noir code - async getNullifierKeysWithNpkMHash([masterNullifierPublicKeyHash]: ACVMField[]): Promise { + async getNullifierKeys([masterNullifierPublicKeyHash]: ACVMField[]): Promise { const { masterNullifierPublicKey, appNullifierSecretKey } = await this.typedOracle.getNullifierKeys( fromACVMField(masterNullifierPublicKeyHash), ); @@ -182,14 +170,6 @@ export class Oracle { return [...publicKeys.toFields(), partialAddress].map(toACVMField); } - // Keeping this oracle separate from above because I don't want an implicit overload in noir code - async getPublicKeysAndPartialAddressWithNpkMHash([masterNullifierPublicKeyHash]: ACVMField[]) { - const parsedNpkMHash = fromACVMField(masterNullifierPublicKeyHash); - const { publicKeys, partialAddress } = await this.typedOracle.getCompleteAddress(parsedNpkMHash); - - return [...publicKeys.toFields(), partialAddress].map(toACVMField); - } - async getNotes( [storageSlot]: ACVMField[], [numSelects]: ACVMField[], diff --git a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts index 7da764e01ff..4a910040384 100644 --- a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts @@ -89,7 +89,7 @@ export abstract class TypedOracle { throw new OracleMethodNotAvailableError('unpackReturns'); } - getNullifierKeys(_accountOrNpkMHash: AztecAddress | Fr): Promise { + getNullifierKeys(_npkMHash: Fr): Promise { throw new OracleMethodNotAvailableError('getNullifierKeys'); } @@ -124,7 +124,7 @@ export abstract class TypedOracle { throw new OracleMethodNotAvailableError('getHeader'); } - getCompleteAddress(_accountOrNpkMHash: AztecAddress | Fr): Promise { + getCompleteAddress(_account: AztecAddress): Promise { throw new OracleMethodNotAvailableError('getCompleteAddress'); } diff --git a/yarn-project/simulator/src/avm/avm_simulator.test.ts b/yarn-project/simulator/src/avm/avm_simulator.test.ts index 4190e3eaeae..c692284d1c4 100644 --- a/yarn-project/simulator/src/avm/avm_simulator.test.ts +++ b/yarn-project/simulator/src/avm/avm_simulator.test.ts @@ -10,7 +10,7 @@ import { AvmNestedCallsTestContractArtifact, AvmTestContractArtifact } from '@az import { jest } from '@jest/globals'; import { strict as assert } from 'assert'; -import { isAvmBytecode } from '../public/transitional_adaptors.js'; +import { isAvmBytecode, markBytecodeAsAvm } from '../public/transitional_adaptors.js'; import { 
AvmMachineState } from './avm_machine_state.js'; import { type MemoryValue, TypeTag, type Uint8 } from './avm_memory_types.js'; import { AvmSimulator } from './avm_simulator.js'; @@ -39,14 +39,14 @@ describe('AVM simulator: injected bytecode', () => { ]); }); - it('Should not be recognized as AVM bytecode (magic missing)', () => { - expect(!isAvmBytecode(bytecode)); + it('Should not be recognized as AVM bytecode (magic missing)', async () => { + expect(!(await isAvmBytecode(bytecode))); }); it('Should execute bytecode that performs basic addition', async () => { const context = initContext({ env: initExecutionEnvironment({ calldata }) }); const { l2GasLeft: initialL2GasLeft } = AvmMachineState.fromState(context.machineState); - const results = await new AvmSimulator(context).executeBytecode(bytecode); + const results = await new AvmSimulator(context).executeBytecode(markBytecodeAsAvm(bytecode)); expect(results.reverted).toBe(false); expect(results.output).toEqual([new Fr(3)]); @@ -59,7 +59,7 @@ describe('AVM simulator: injected bytecode', () => { machineState: initMachineState({ l2GasLeft: 5 }), }); - const results = await new AvmSimulator(context).executeBytecode(bytecode); + const results = await new AvmSimulator(context).executeBytecode(markBytecodeAsAvm(bytecode)); expect(results.reverted).toBe(true); expect(results.output).toEqual([]); expect(results.revertReason?.message).toEqual('Not enough L2GAS gas left'); @@ -91,9 +91,9 @@ describe('AVM simulator: transpiled Noir contracts', () => { expect(results.output).toEqual([new Fr(0)]); }); - it('Should be recognized as AVM bytecode (magic present)', () => { + it('Should be recognized as AVM bytecode (magic present)', async () => { const bytecode = getAvmTestContractBytecode('add_args_return'); - expect(isAvmBytecode(bytecode)); + expect(await isAvmBytecode(bytecode)); }); describe('U128 addition and overflows', () => { diff --git a/yarn-project/simulator/src/avm/avm_simulator.ts b/yarn-project/simulator/src/avm/avm_simulator.ts index 428dcb624d2..65ea246b10e 100644 --- a/yarn-project/simulator/src/avm/avm_simulator.ts +++ b/yarn-project/simulator/src/avm/avm_simulator.ts @@ -2,7 +2,7 @@ import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { strict as assert } from 'assert'; -import { isAvmBytecode } from '../public/transitional_adaptors.js'; +import { decompressBytecodeIfCompressed, isAvmBytecode } from '../public/transitional_adaptors.js'; import type { AvmContext } from './avm_context.js'; import { AvmContractCallResults } from './avm_message_call_result.js'; import { @@ -39,7 +39,6 @@ export class AvmSimulator { if (!bytecode) { throw new NoBytecodeForContractError(this.context.environment.address); } - assert(isAvmBytecode(bytecode), "AVM simulator can't execute non-AVM bytecode"); return await this.executeBytecode(bytecode); } @@ -49,7 +48,10 @@ export class AvmSimulator { * This method is useful for testing and debugging. 
*/ public async executeBytecode(bytecode: Buffer): Promise { - return await this.executeInstructions(decodeFromBytecode(bytecode)); + const decompressedBytecode = await decompressBytecodeIfCompressed(bytecode); + assert(isAvmBytecode(decompressedBytecode), "AVM simulator can't execute non-AVM bytecode"); + + return await this.executeInstructions(decodeFromBytecode(decompressedBytecode)); } /** diff --git a/yarn-project/simulator/src/avm/fixtures/index.ts b/yarn-project/simulator/src/avm/fixtures/index.ts index 2ada0075680..8616e8d5feb 100644 --- a/yarn-project/simulator/src/avm/fixtures/index.ts +++ b/yarn-project/simulator/src/avm/fixtures/index.ts @@ -90,8 +90,8 @@ export function initGlobalVariables(overrides?: Partial): Globa */ export function initMachineState(overrides?: Partial): AvmMachineState { return AvmMachineState.fromState({ - l2GasLeft: overrides?.l2GasLeft ?? 100e6, - daGasLeft: overrides?.daGasLeft ?? 100e6, + l2GasLeft: overrides?.l2GasLeft ?? 1e8, + daGasLeft: overrides?.daGasLeft ?? 1e8, }); } diff --git a/yarn-project/simulator/src/avm/journal/journal.ts b/yarn-project/simulator/src/avm/journal/journal.ts index 94f764409bd..3017e1a7eaf 100644 --- a/yarn-project/simulator/src/avm/journal/journal.ts +++ b/yarn-project/simulator/src/avm/journal/journal.ts @@ -6,10 +6,10 @@ import { ContractStorageUpdateRequest, EthAddress, L2ToL1Message, + LogHash, NoteHash, Nullifier, ReadRequest, - SideEffect, } from '@aztec/circuits.js'; import { EventSelector } from '@aztec/foundation/abi'; import { Fr } from '@aztec/foundation/fields'; @@ -64,7 +64,7 @@ type PartialPublicExecutionResult = { newNullifiers: Nullifier[]; contractStorageReads: ContractStorageRead[]; contractStorageUpdateRequests: ContractStorageUpdateRequest[]; - unencryptedLogsHashes: SideEffect[]; + unencryptedLogsHashes: LogHash[]; unencryptedLogs: UnencryptedL2Log[]; unencryptedLogPreimagesLength: Fr; allUnencryptedLogs: UnencryptedL2Log[]; @@ -119,8 +119,7 @@ export class AvmPersistableStateManager { contractStorageUpdateRequests: [], unencryptedLogsHashes: [], unencryptedLogs: [], - // The length starts at 4 because it will always include the size. - unencryptedLogPreimagesLength: new Fr(4), + unencryptedLogPreimagesLength: Fr.ZERO, allUnencryptedLogs: [], nestedExecutions: [], }; @@ -311,12 +310,14 @@ export class AvmPersistableStateManager { this.transitionalExecutionResult.allUnencryptedLogs.push(ulog); // this duplicates exactly what happens in the trace just for the purpose of transitional integration with the kernel this.transitionalExecutionResult.unencryptedLogsHashes.push( - new SideEffect(logHash, new Fr(this.trace.accessCounter)), + new LogHash(logHash, this.trace.accessCounter, new Fr(ulog.length)), ); // Duplicates computation performed in public_context.nr::emit_unencrypted_log // 44 = addr (32) + selector (4) + raw log len (4) + processed log len (4). - this.transitionalExecutionResult.unencryptedLogPreimagesLength = new Fr( - this.transitionalExecutionResult.unencryptedLogPreimagesLength.toNumber() + 44 + log.length * Fr.SIZE_IN_BYTES, + // Note that ulog.length includes all the above bytes apart from processed log len + // Processed log len is added to replicate conversion to function_l2_logs at the end of exec. + this.transitionalExecutionResult.unencryptedLogPreimagesLength = new Fr(ulog.length + 4).add( + this.transitionalExecutionResult.unencryptedLogPreimagesLength, ); // TODO(6206): likely need to track this here and not just in the transitional logic. 
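To make the byte accounting in the journal change above concrete: ulog.length already covers the address, selector, and raw-length framing, and the extra 4 bytes added per log are the processed-log length prefix applied when logs are folded into function_l2_logs at the end of execution. A worked example of the diff's own arithmetic:

```typescript
// addr (32) + selector (4) + raw log len (4) = 40 bytes of framing are already
// included in ulog.length; the processed-log length prefix adds 4 more.
function emittedLogBytes(rawLogLength: number): number {
  const framed = 32 + 4 + 4 + rawLogLength; // what ulog.length reports
  return framed + 4; // plus the processed-log length prefix
}

// A 10-byte raw log contributes 32 + 4 + 4 + 10 + 4 = 54 bytes to the running total:
console.log(emittedLogBytes(10)); // 54
```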
diff --git a/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts b/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts index 01dcade9354..6dd086bc78d 100644 --- a/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts @@ -11,6 +11,7 @@ import { adjustCalldataIndex, initContext } from '../fixtures/index.js'; import { HostStorage } from '../journal/host_storage.js'; import { AvmPersistableStateManager } from '../journal/journal.js'; import { encodeToBytecode } from '../serialization/bytecode_serialization.js'; +import { L2GasLeft } from './context_getters.js'; import { Call, Return, Revert, StaticCall } from './external_calls.js'; import { type Instruction } from './instruction.js'; import { CalldataCopy } from './memory.js'; @@ -40,7 +41,7 @@ describe('External Calls', () => { ...Buffer.from('d2345678', 'hex'), // retOffset ...Buffer.from('e2345678', 'hex'), // retSize ...Buffer.from('f2345678', 'hex'), // successOffset - ...Buffer.from('f3345678', 'hex'), // temporaryFunctionSelectorOffset + ...Buffer.from('f3345678', 'hex'), // functionSelectorOffset ]); const inst = new Call( /*indirect=*/ 0x01, @@ -51,7 +52,7 @@ describe('External Calls', () => { /*retOffset=*/ 0xd2345678, /*retSize=*/ 0xe2345678, /*successOffset=*/ 0xf2345678, - /*temporaryFunctionSelectorOffset=*/ 0xf3345678, + /*functionSelectorOffset=*/ 0xf3345678, ); expect(Call.deserialize(buf)).toEqual(inst); @@ -86,8 +87,7 @@ describe('External Calls', () => { ]), ); - // const { l2GasLeft: initialL2Gas, daGasLeft: initialDaGas } = context.machineState; - const { daGasLeft: initialDaGas } = context.machineState; + const { l2GasLeft: initialL2Gas, daGasLeft: initialDaGas } = context.machineState; context.machineState.memory.set(0, new Field(l2Gas)); context.machineState.memory.set(1, new Field(daGas)); @@ -107,7 +107,7 @@ describe('External Calls', () => { retOffset, retSize, successOffset, - /*temporaryFunctionSelectorOffset=*/ 0, + /*functionSelectorOffset=*/ 0, ); await instruction.execute(context); @@ -128,48 +128,60 @@ describe('External Calls', () => { const expectedStoredValue = new Fr(1n); expect(nestedContractWrites!.get(slotNumber)).toEqual(expectedStoredValue); - // TODO(https://github.com/AztecProtocol/aztec-packages/issues/5625): gas not plumbed through correctly in nested calls. 
- // expect(context.machineState.l2GasLeft).toEqual(initialL2Gas - otherContextInstructionsL2GasCost); + expect(context.machineState.l2GasLeft).toBeLessThan(initialL2Gas); expect(context.machineState.daGasLeft).toEqual(initialDaGas); }); - it('Should refuse to execute a call if not enough gas', async () => { + it('Should cap to available gas if allocated is bigger', async () => { const gasOffset = 0; const l2Gas = 1e9; - const daGas = 3e6; + const daGas = 1e9; const addrOffset = 2; const addr = new Fr(123456n); - const argsOffset = 3; - const args = [new Field(1n), new Field(2n), new Field(3n)]; - const argsSize = args.length; + const argsSize = 0; const argsSizeOffset = 20; const retOffset = 7; - const retSize = 2; + const retSize = 1; const successOffset = 6; + const otherContextInstructionsBytecode = markBytecodeAsAvm( + encodeToBytecode([ + new L2GasLeft(/*indirect=*/ 0, /*dstOffset=*/ 0), + new Return(/*indirect=*/ 0, /*retOffset=*/ 0, /*size=*/ 1), + ]), + ); + + const { l2GasLeft: initialL2Gas, daGasLeft: initialDaGas } = context.machineState; + context.machineState.memory.set(0, new Field(l2Gas)); context.machineState.memory.set(1, new Field(daGas)); context.machineState.memory.set(2, new Field(addr)); context.machineState.memory.set(argsSizeOffset, new Uint32(argsSize)); - context.machineState.memory.setSlice(3, args); - jest .spyOn(context.persistableState.hostStorage.contractsDb, 'getBytecode') - .mockRejectedValue(new Error('No bytecode expected to be requested since not enough gas')); + .mockReturnValue(Promise.resolve(otherContextInstructionsBytecode)); const instruction = new Call( /*indirect=*/ 0, gasOffset, addrOffset, - argsOffset, + /*argsOffset=*/ 0, argsSizeOffset, retOffset, retSize, successOffset, - /*temporaryFunctionSelectorOffset=*/ 0, + /*functionSelectorOffset=*/ 0, ); + await instruction.execute(context); - await expect(() => instruction.execute(context)).rejects.toThrow(/Not enough.*gas left/i); + const successValue = context.machineState.memory.get(successOffset); + expect(successValue).toEqual(new Uint8(1n)); + + const retValue = context.machineState.memory.get(retOffset).toBigInt(); + expect(retValue).toBeLessThan(initialL2Gas); + + expect(context.machineState.l2GasLeft).toBeLessThan(initialL2Gas); + expect(context.machineState.daGasLeft).toEqual(initialDaGas); }); }); @@ -185,7 +197,7 @@ describe('External Calls', () => { ...Buffer.from('d2345678', 'hex'), // retOffset ...Buffer.from('e2345678', 'hex'), // retSize ...Buffer.from('f2345678', 'hex'), // successOffset - ...Buffer.from('f3345678', 'hex'), // temporaryFunctionSelectorOffset + ...Buffer.from('f3345678', 'hex'), // functionSelectorOffset ]); const inst = new StaticCall( /*indirect=*/ 0x01, @@ -196,7 +208,7 @@ describe('External Calls', () => { /*retOffset=*/ 0xd2345678, /*retSize=*/ 0xe2345678, /*successOffset=*/ 0xf2345678, - /*temporaryFunctionSelectorOffset=*/ 0xf3345678, + /*functionSelectorOffset=*/ 0xf3345678, ); expect(StaticCall.deserialize(buf)).toEqual(inst); @@ -241,7 +253,7 @@ describe('External Calls', () => { retOffset, retSize, successOffset, - /*temporaryFunctionSelectorOffset=*/ 0, + /*functionSelectorOffset=*/ 0, ); await expect(() => instruction.execute(context)).rejects.toThrow( 'Static call cannot update the state, emit L2->L1 messages or generate logs', diff --git a/yarn-project/simulator/src/avm/opcodes/external_calls.ts b/yarn-project/simulator/src/avm/opcodes/external_calls.ts index 21d96882ed9..8ffcce7215b 100644 --- 
a/yarn-project/simulator/src/avm/opcodes/external_calls.ts +++ b/yarn-project/simulator/src/avm/opcodes/external_calls.ts @@ -3,7 +3,7 @@ import { padArrayEnd } from '@aztec/foundation/collection'; import { convertAvmResultsToPxResult, createPublicExecution } from '../../public/transitional_adaptors.js'; import type { AvmContext } from '../avm_context.js'; -import { gasLeftToGas, sumGas } from '../avm_gas.js'; +import { gasLeftToGas } from '../avm_gas.js'; import { Field, Uint8 } from '../avm_memory_types.js'; import { type AvmContractCallResults } from '../avm_message_call_result.js'; import { AvmSimulator } from '../avm_simulator.js'; @@ -57,16 +57,23 @@ abstract class ExternalCall extends Instruction { const callAddress = memory.getAs(addrOffset); const calldataSize = memory.get(argsSizeOffset).toNumber(); const calldata = memory.getSlice(argsOffset, calldataSize).map(f => f.toFr()); - const l2Gas = memory.get(gasOffset).toNumber(); - const daGas = memory.getAs(gasOffset + 1).toNumber(); const functionSelector = memory.getAs(this.functionSelectorOffset).toFr(); // If we are already in a static call, we propagate the environment. const callType = context.environment.isStaticCall ? 'STATICCALL' : this.type; - const allocatedGas = { l2Gas, daGas }; + // First we consume the gas for this operation. const memoryOperations = { reads: calldataSize + 5, writes: 1 + this.retSize, indirect: this.indirect }; - const totalGas = sumGas(this.gasCost(memoryOperations), allocatedGas); - context.machineState.consumeGas(totalGas); + context.machineState.consumeGas(this.gasCost(memoryOperations)); + // Then we consume the gas allocated for the nested call. The excess will be refunded later. + // Gas allocation is capped by the amount of gas left in the current context. + // We have to do some dancing here because the gas allocation is a field, + // but in the machine state we track gas as a number. + const allocatedL2Gas = Number(BigIntMin(memory.get(gasOffset).toBigInt(), BigInt(context.machineState.l2GasLeft))); + const allocatedDaGas = Number( + BigIntMin(memory.get(gasOffset + 1).toBigInt(), BigInt(context.machineState.daGasLeft)), + ); + const allocatedGas = { l2Gas: allocatedL2Gas, daGas: allocatedDaGas }; + context.machineState.consumeGas(allocatedGas); // TRANSITIONAL: This should be removed once the kernel handles an entire enqueued call per circuit const nestedContext = context.createNestedContractCallContext( @@ -217,3 +224,8 @@ export class Revert extends Instruction { memory.assert(memoryOperations); } } + +/** Returns the smaller of two bigints. */ +function BigIntMin(a: bigint, b: bigint): bigint { + return a < b ?
a : b; +} diff --git a/yarn-project/simulator/src/client/client_execution_context.ts b/yarn-project/simulator/src/client/client_execution_context.ts index 6b381dd723c..6b5e75207ec 100644 --- a/yarn-project/simulator/src/client/client_execution_context.ts +++ b/yarn-project/simulator/src/client/client_execution_context.ts @@ -1,14 +1,13 @@ import { type AuthWitness, type AztecNode, - EncryptedFunctionL2Logs, EncryptedL2Log, L1NotePayload, Note, type NoteStatus, TaggedNote, - UnencryptedFunctionL2Logs, type UnencryptedL2Log, + encryptBuffer, } from '@aztec/circuit-types'; import { CallContext, @@ -20,18 +19,24 @@ import { type TxContext, } from '@aztec/circuits.js'; import { Aes128 } from '@aztec/circuits.js/barretenberg'; -import { computePublicDataTreeLeafSlot, computeUniqueNoteHash, siloNoteHash } from '@aztec/circuits.js/hash'; +import { + computeInnerNoteHash, + computeNoteContentHash, + computePublicDataTreeLeafSlot, + computeUniqueNoteHash, + siloNoteHash, +} from '@aztec/circuits.js/hash'; import { type FunctionAbi, type FunctionArtifact, countArgumentsSize } from '@aztec/foundation/abi'; import { type AztecAddress } from '@aztec/foundation/aztec-address'; -import { Fr, type Point } from '@aztec/foundation/fields'; +import { Fr, GrumpkinScalar, type Point } from '@aztec/foundation/fields'; import { applyStringFormatting, createDebugLogger } from '@aztec/foundation/log'; +import { serializeToBuffer } from '@aztec/foundation/serialize'; import { type NoteData, toACVMWitness } from '../acvm/index.js'; import { type PackedValuesCache } from '../common/packed_values_cache.js'; import { type DBOracle } from './db_oracle.js'; import { type ExecutionNoteCache } from './execution_note_cache.js'; import { CountedLog, type ExecutionResult, type NoteAndSlot } from './execution_result.js'; -import { type LogsCache } from './logs_cache.js'; import { pickNotes } from './pick_notes.js'; import { executePrivateFunction } from './private_execution.js'; import { ViewDataOracle } from './view_data_oracle.js'; @@ -59,6 +64,7 @@ export class ClientExecutionContext extends ViewDataOracle { */ private noteHashLeafIndexMap: Map = new Map(); private nullifiedNoteHashCounters: Map = new Map(); + private noteEncryptedLogs: CountedLog[] = []; private encryptedLogs: CountedLog[] = []; private unencryptedLogs: CountedLog[] = []; private nestedExecutions: ExecutionResult[] = []; @@ -76,7 +82,6 @@ export class ClientExecutionContext extends ViewDataOracle { authWitnesses: AuthWitness[], private readonly packedValuesCache: PackedValuesCache, private readonly noteCache: ExecutionNoteCache, - private readonly logsCache: LogsCache, db: DBOracle, private node: AztecNode, protected sideEffectCounter: number = 0, @@ -133,31 +138,51 @@ export class ClientExecutionContext extends ViewDataOracle { } /** - * Return the encrypted logs emitted during this execution. + * Return the note encrypted logs emitted during this execution. */ - public getEncryptedLogs() { - return this.encryptedLogs; + public getNoteEncryptedLogs() { + return this.noteEncryptedLogs; + } + + /** + * Sometimes notes can be chopped after a nested execution is complete. + * This means finished nested executions still hold transient logs. This method removes them. + * TODO(Miranda): is there a cleaner solution? 
+ */ + public chopNoteEncryptedLogs() { + // Do not return logs that have been chopped in the cache + const allNoteLogs = this.noteCache.getLogs(); + this.noteEncryptedLogs = this.noteEncryptedLogs.filter(l => allNoteLogs.includes(l)); + const chop = (thing: any) => + thing.nestedExecutions.forEach((result: ExecutionResult) => { + if (!result.noteEncryptedLogs[0]?.isEmpty()) { + // The execution has note logs + result.noteEncryptedLogs = result.noteEncryptedLogs.filter(l => allNoteLogs.includes(l)); + } + chop(result); + }); + chop(this); } /** - * Return the encrypted logs emitted during this execution and nested executions. + * Return the note encrypted logs emitted during this execution and nested executions. */ - public getAllEncryptedLogs() { - return new EncryptedFunctionL2Logs(this.logsCache.getEncryptedLogs()); + public getAllNoteEncryptedLogs() { + return this.noteCache.getLogs(); } /** * Return the encrypted logs emitted during this execution. */ - public getUnencryptedLogs() { - return this.unencryptedLogs; + public getEncryptedLogs() { + return this.encryptedLogs; } /** - * Return the unencrypted logs emitted during this execution and nested executions. + * Return the unencrypted logs emitted during this execution. */ - public getAllUnencryptedLogs() { - return new UnencryptedFunctionL2Logs(this.logsCache.getUnencryptedLogs()); + public getUnencryptedLogs() { + return this.unencryptedLogs; } /** @@ -352,14 +377,29 @@ export class ClientExecutionContext extends ViewDataOracle { log: Fr[], counter: number, ) { + // TODO(Miranda): This is a temporary solution until we encrypt logs in the circuit + // Then we require a new oracle that deals only with notes const note = new Note(log); - const l1NotePayload = new L1NotePayload(note, contractAddress, storageSlot, noteTypeId); - const taggedNote = new TaggedNote(l1NotePayload); - const encryptedNote = taggedNote.toEncryptedBuffer(publicKey); - const encryptedLog = new EncryptedL2Log(encryptedNote); - this.encryptedLogs.push(new CountedLog(encryptedLog, counter)); - this.logsCache.addEncryptedLog(encryptedLog); - return encryptedNote; + const innerNoteHash = computeInnerNoteHash(storageSlot, computeNoteContentHash(log)); + const noteExists = this.noteCache.checkNoteExists(contractAddress, innerNoteHash); + if (noteExists) { + // Log linked to note + const l1NotePayload = new L1NotePayload(note, contractAddress, storageSlot, noteTypeId); + const taggedNote = new TaggedNote(l1NotePayload); + const encryptedNote = taggedNote.toEncryptedBuffer(publicKey); + const encryptedLog = new CountedLog(new EncryptedL2Log(encryptedNote), counter); + this.noteEncryptedLogs.push(encryptedLog); + this.noteCache.addNewLog(encryptedLog, innerNoteHash); + return encryptedNote; + } else { + // Generic non-note log + // We assume only the log and address are required + const preimage = Buffer.concat([contractAddress.toBuffer(), serializeToBuffer(log)]); + const encryptedMsg = encryptBuffer(preimage, GrumpkinScalar.random(), publicKey); + const encryptedLog = new EncryptedL2Log(encryptedMsg); + this.encryptedLogs.push(new CountedLog(encryptedLog, counter)); + return encryptedMsg; + } } /** @@ -368,7 +408,6 @@ export class ClientExecutionContext extends ViewDataOracle { */ public override emitUnencryptedLog(log: UnencryptedL2Log, counter: number) { this.unencryptedLogs.push(new CountedLog(log, counter)); - this.logsCache.addUnencryptedLog(log); const text = log.toHumanReadable(); this.log.verbose(`Emitted unencrypted log: "${text.length > 100 ?
text.slice(0, 100) + '...' : text}"`); } @@ -382,7 +421,6 @@ export class ClientExecutionContext extends ViewDataOracle { */ public override emitContractClassUnencryptedLog(log: UnencryptedL2Log, counter: number) { this.unencryptedLogs.push(new CountedLog(log, counter)); - this.logsCache.addUnencryptedLog(log); const text = log.toHumanReadable(); this.log.verbose( `Emitted unencrypted log from ContractClassRegisterer: "${ @@ -397,8 +435,8 @@ export class ClientExecutionContext extends ViewDataOracle { childExecutionResult.callStackItem.publicInputs.newNoteHashes.some(item => !item.isEmpty()) || childExecutionResult.callStackItem.publicInputs.newNullifiers.some(item => !item.isEmpty()) || childExecutionResult.callStackItem.publicInputs.newL2ToL1Msgs.some(item => !item.isEmpty()) || - !childExecutionResult.callStackItem.publicInputs.encryptedLogPreimagesLength.equals(new Fr(4)) || - !childExecutionResult.callStackItem.publicInputs.unencryptedLogPreimagesLength.equals(new Fr(4)) + !childExecutionResult.callStackItem.publicInputs.encryptedLogPreimagesLength.equals(Fr.ZERO) || + !childExecutionResult.callStackItem.publicInputs.unencryptedLogPreimagesLength.equals(Fr.ZERO) ) { throw new Error(`Static call cannot create new notes, emit L2->L1 messages or generate logs`); } @@ -450,7 +488,6 @@ export class ClientExecutionContext extends ViewDataOracle { this.authWitnesses, this.packedValuesCache, this.noteCache, - this.logsCache, this.db, this.node, sideEffectCounter, diff --git a/yarn-project/simulator/src/client/db_oracle.ts b/yarn-project/simulator/src/client/db_oracle.ts index 13bc8a9c7d4..f36616c2303 100644 --- a/yarn-project/simulator/src/client/db_oracle.ts +++ b/yarn-project/simulator/src/client/db_oracle.ts @@ -44,12 +44,12 @@ export interface DBOracle extends CommitmentsDB { getContractInstance(address: AztecAddress): Promise; /** - * Retrieve the complete address associated to a given address or master nullifier public key hash. - * @param accountOrNpkMHash - account address or master nullifier public key hash. - * @returns A complete address associated with the input address or master nullifier public key hash + * Retrieve the complete address associated to a given address. + * @param account - The account address. + * @returns A complete address associated with the input address. * @throws An error if the account is not registered in the database. */ - getCompleteAddress(accountOrNpkMHash: AztecAddress | Fr): Promise; + getCompleteAddress(account: AztecAddress): Promise; /** * Retrieve the auth witness for a given message hash. @@ -66,12 +66,12 @@ export interface DBOracle extends CommitmentsDB { popCapsule(): Promise; /** - * Retrieve nullifier keys associated with a specific account or master nullifier public key and app address. - * @param accountOrNpkMHash - account address or master nullifier public key hash. + * Retrieve nullifier keys associated with a specific master nullifier public key and app address. + * @param npkMHash - The master nullifier public key hash. * @returns A Promise that resolves to nullifier keys. * @throws If the nullifier keys are not registered in the key store. */ - getNullifierKeys(accountOrNpkMHash: AztecAddress | Fr, contractAddress: AztecAddress): Promise; + getNullifierKeys(npkMHash: Fr, contractAddress: AztecAddress): Promise; /** * Retrieves a set of notes stored in the database for a given contract address and storage slot. 
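To illustrate the narrowed DBOracle contract above, here is a sketch of a test double keyed strictly by the master nullifier public key hash; the NullifierKeys shape is assumed from the surrounding @param docs rather than taken from the patch:

import { type AztecAddress } from '@aztec/foundation/aztec-address';
import { Fr } from '@aztec/foundation/fields';

// Assumed shape of the keys returned by the oracle (illustrative only).
interface NullifierKeys {
  masterNullifierPublicKey: unknown;
  appNullifierSecretKey: Fr;
}

class StubNullifierKeyStore {
  // Fr instances with equal contents are distinct objects, so key by string form.
  private byNpkMHash = new Map<string, NullifierKeys>();

  register(npkMHash: Fr, keys: NullifierKeys) {
    this.byNpkMHash.set(npkMHash.toString(), keys);
  }

  // Mirrors the narrowed getNullifierKeys(npkMHash, contractAddress) signature.
  getNullifierKeys(npkMHash: Fr, _contractAddress: AztecAddress): Promise<NullifierKeys> {
    const keys = this.byNpkMHash.get(npkMHash.toString());
    if (!keys) {
      throw new Error(`Unknown master nullifier public key hash: ${npkMHash}`);
    }
    return Promise.resolve(keys);
  }
}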
diff --git a/yarn-project/simulator/src/client/execution_note_cache.ts b/yarn-project/simulator/src/client/execution_note_cache.ts index 2166f317941..a9572810981 100644 --- a/yarn-project/simulator/src/client/execution_note_cache.ts +++ b/yarn-project/simulator/src/client/execution_note_cache.ts @@ -1,8 +1,10 @@ +import { type EncryptedL2Log } from '@aztec/circuit-types'; import { siloNullifier } from '@aztec/circuits.js/hash'; import { type AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr } from '@aztec/foundation/fields'; import { type NoteData } from '../acvm/index.js'; +import { type CountedLog } from './execution_result.js'; export interface PendingNote { note: NoteData; @@ -27,6 +29,13 @@ export class ExecutionNoteCache { */ private nullifiers: Map<bigint, Set<bigint>> = new Map(); + /** + * The list of encrypted logs linked to note hashes created in this transaction. + * This mapping maps from inner note hash to log(s) emitted for that note hash. + * Note that their value (bigint representation) is used because Frs cannot be looked up in Maps. + */ + private logs: Map<bigint, CountedLog<EncryptedL2Log>[]> = new Map(); + /** * Add a new note to cache. * @param note - New note created during execution. @@ -37,6 +46,16 @@ export class ExecutionNoteCache { this.newNotes.set(note.contractAddress.toBigInt(), notes); } + /** + * Add a new log to cache. + * @param log - New log emitted during execution. + * @param innerNoteHash - The inner note hash of the note the log is linked to. + */ + public addNewLog(log: CountedLog, innerNoteHash: Fr) { + const logs = this.logs.get(innerNoteHash.toBigInt()) ?? []; + logs.push(log); + this.logs.set(innerNoteHash.toBigInt(), logs); + } + /** * Add a nullifier to cache. It could be for a db note or a new note created during execution. * @param contractAddress - Contract address of the note. @@ -52,7 +71,7 @@ export class ExecutionNoteCache { this.nullifiers.set(contractAddress.toBigInt(), nullifiers); let nullifiedNoteHashCounter: number | undefined = undefined; - // Find and remove the matching new note if the emitted innerNoteHash is not empty. + // Find and remove the matching new note and log(s) if the emitted innerNoteHash is not empty. if (!innerNoteHash.equals(Fr.ZERO)) { const notes = this.newNotes.get(contractAddress.toBigInt()) ?? []; const noteIndexToRemove = notes.findIndex(n => n.note.innerNoteHash.equals(innerNoteHash)); @@ -62,6 +81,8 @@ export class ExecutionNoteCache { const note = notes.splice(noteIndexToRemove, 1)[0]; nullifiedNoteHashCounter = note.counter; this.newNotes.set(contractAddress.toBigInt(), notes); + // If no log is linked to the note hash, this delete is a no-op. + this.logs.delete(innerNoteHash.toBigInt()); } return nullifiedNoteHashCounter; @@ -96,4 +117,11 @@ export class ExecutionNoteCache { public getNullifiers(contractAddress: AztecAddress): Set { return this.nullifiers.get(contractAddress.toBigInt()) ?? new Set(); } + + /** + * Return all note logs emitted during this transaction.
+ */ + public getLogs(): CountedLog[] { + return Array.from(this.logs.values()).flat(); + } } diff --git a/yarn-project/simulator/src/client/execution_result.test.ts b/yarn-project/simulator/src/client/execution_result.test.ts index 0da6182478d..6b6ee3ed913 100644 --- a/yarn-project/simulator/src/client/execution_result.test.ts +++ b/yarn-project/simulator/src/client/execution_result.test.ts @@ -19,6 +19,7 @@ function emptyExecutionResult(): ExecutionResult { nestedExecutions: [], enqueuedPublicFunctionCalls: [], publicTeardownFunctionCall: PublicCallRequest.empty(), + noteEncryptedLogs: [], encryptedLogs: [], unencryptedLogs: [], }; } diff --git a/yarn-project/simulator/src/client/execution_result.ts b/yarn-project/simulator/src/client/execution_result.ts index a80b7713cd2..1a734146703 100644 --- a/yarn-project/simulator/src/client/execution_result.ts +++ b/yarn-project/simulator/src/client/execution_result.ts @@ -58,6 +58,11 @@ export interface ExecutionResult { enqueuedPublicFunctionCalls: PublicCallRequest[]; /** Public function execution requested for teardown */ publicTeardownFunctionCall: PublicCallRequest; + /** + * Encrypted note logs emitted during execution of this function call. + * Note: These are preimages to `noteEncryptedLogsHashes`. + */ + noteEncryptedLogs: CountedLog[]; /** * Encrypted logs emitted during execution of this function call. * Note: These are preimages to `encryptedLogsHashes`. @@ -82,13 +87,32 @@ export function collectNullifiedNoteHashCounters(execResult: ExecutionResult, ac return accum; } +/** + * Collect all note encrypted logs across all nested executions. + * @param execResult - The topmost execution result. + * @returns All note encrypted logs. + */ +function collectNoteEncryptedLogs(execResult: ExecutionResult): CountedLog[] { + return [execResult.noteEncryptedLogs, ...execResult.nestedExecutions.flatMap(collectNoteEncryptedLogs)].flat(); +} + +/** + * Collect all note encrypted logs across all nested executions and sort them by counter. + * @param execResult - The topmost execution result. + * @returns All note encrypted logs, sorted by counter. + */ +export function collectSortedNoteEncryptedLogs(execResult: ExecutionResult): EncryptedFunctionL2Logs { + const allLogs = collectNoteEncryptedLogs(execResult); + const sortedLogs = sortByCounter(allLogs); + return new EncryptedFunctionL2Logs(sortedLogs.map(l => l.log)); +} /** * Collect all encrypted logs across all nested executions. * @param execResult - The topmost execution result. * @returns All encrypted logs. */ function collectEncryptedLogs(execResult: ExecutionResult): CountedLog[] { - return [execResult.encryptedLogs, ...[...execResult.nestedExecutions].flatMap(collectEncryptedLogs)].flat(); + return [execResult.encryptedLogs, ...execResult.nestedExecutions.flatMap(collectEncryptedLogs)].flat(); } /** @@ -108,7 +132,7 @@ export function collectSortedEncryptedLogs(execResult: ExecutionResult): Encrypt * @returns All unencrypted logs.
*/ function collectUnencryptedLogs(execResult: ExecutionResult): CountedLog[] { - return [execResult.unencryptedLogs, ...[...execResult.nestedExecutions].flatMap(collectUnencryptedLogs)].flat(); + return [execResult.unencryptedLogs, ...execResult.nestedExecutions.flatMap(collectUnencryptedLogs)].flat(); } /** diff --git a/yarn-project/simulator/src/client/logs_cache.ts b/yarn-project/simulator/src/client/logs_cache.ts deleted file mode 100644 index c1e52ae7d8e..00000000000 --- a/yarn-project/simulator/src/client/logs_cache.ts +++ /dev/null @@ -1,65 +0,0 @@ -import { type EncryptedL2Log, type UnencryptedL2Log } from '@aztec/circuit-types'; - -/** - * Log data that's accessible by all the function calls in an execution. - * This class exists to: - * 1. Keep track of logs emitted through nested calls in the correct order. - * 2. TODO(1641): Remove encrypted logs based on notes nullified in the same scope. - */ -export class LogsCache { - /** - * Logs notes created in this transaction. - */ - private encryptedLogs: EncryptedL2Log[] = []; - private unencryptedLogs: UnencryptedL2Log[] = []; - - // TODO Separate encrypted logs linked to note hashes and arbitrary logs: - - // Maps from note hash to encrypted log - useful for removing transient logs - // private encryptedLogsLinkedToNotes: Map = new Map(); - - // /** - // * Remove the encrypted log for a nullified note. - // * This fn should only be called if the note's innerNoteHash != 0. - // * @param noteHashCounter - Side effect counter of the note. - // */ - // public nullifyNote(noteHashCounter: Fr) { - // // Find and remove the matching new note if the emitted innerNoteHash is not empty. - // const log = this.encryptedLogsLinkedToNotes.get(noteHashCounter.toBigInt()) ?? false; - // // TODO: throw here? Will the log always be here? - // if (!log) { - // throw new Error('Attempt to remove a pending note log that does not exist.'); - // } - // this.encryptedLogsLinkedToNotes.delete(noteHashCounter.toBigInt()); - // } - - /** - * Add a new encrypted log to cache. - * @param log - New log created during execution. - */ - public addEncryptedLog(log: EncryptedL2Log) { - this.encryptedLogs.push(log); - } - - /** - * Add a new unencrypted log to cache. - * @param log - New log created during execution. - */ - public addUnencryptedLog(log: UnencryptedL2Log) { - this.unencryptedLogs.push(log); - } - - /** - * Return the encrypted logs. - */ - public getEncryptedLogs() { - return this.encryptedLogs; - } - - /** - * Return the encrypted logs. - */ - public getUnencryptedLogs() { - return this.unencryptedLogs; - } -} diff --git a/yarn-project/simulator/src/client/private_execution.test.ts b/yarn-project/simulator/src/client/private_execution.test.ts index a09bfdbe780..7750808fd5d 100644 --- a/yarn-project/simulator/src/client/private_execution.test.ts +++ b/yarn-project/simulator/src/client/private_execution.test.ts @@ -141,7 +141,7 @@ describe('Private Execution test suite', () => { // Create a new snapshot. 
const newSnap = new AppendOnlyTreeSnapshot(Fr.fromBuffer(tree.getRoot(true)), Number(tree.getNumLeaves(true))); - if (name === 'noteHash' || name === 'l1ToL2Messages' || 'publicData') { + if (name === 'noteHash' || name === 'l1ToL2Messages' || name === 'publicData') { header = new Header( header.lastArchive, header.contentCommitment, @@ -167,9 +167,10 @@ return trees[name]; }; - const getEncryptedSerializedLength = (result: ExecutionResult) => { - const fnLogs = new EncryptedFunctionL2Logs(result.encryptedLogs.map(l => l.log)); - return fnLogs.getSerializedLength(); + const getEncryptedNoteSerializedLength = (result: ExecutionResult) => { + const fnLogs = new EncryptedFunctionL2Logs(result.noteEncryptedLogs.map(l => l.log)); + // We subtract 4 to avoid counting the extra 4 bytes used to store the length for L1 + return fnLogs.getSerializedLength() - 4; }; beforeAll(() => { @@ -190,37 +191,35 @@ beforeEach(async () => { trees = {}; oracle = mock(); - oracle.getNullifierKeys.mockImplementation( - (accountOrNpkMHash: AztecAddress | Fr, contractAddress: AztecAddress) => { - if (accountOrNpkMHash.equals(ownerCompleteAddress.address)) { - return Promise.resolve({ - masterNullifierPublicKey: ownerCompleteAddress.publicKeys.masterNullifierPublicKey, - appNullifierSecretKey: computeAppNullifierSecretKey(ownerMasterNullifierSecretKey, contractAddress), - }); - } - if (accountOrNpkMHash.equals(recipientCompleteAddress.address)) { - return Promise.resolve({ - masterNullifierPublicKey: recipientCompleteAddress.publicKeys.masterNullifierPublicKey, - appNullifierSecretKey: computeAppNullifierSecretKey(recipientMasterNullifierSecretKey, contractAddress), - }); - } - throw new Error(`Unknown address ${accountOrNpkMHash}`); - }, - ); + oracle.getNullifierKeys.mockImplementation((masterNullifierPublicKeyHash: Fr, contractAddress: AztecAddress) => { + if (masterNullifierPublicKeyHash.equals(ownerCompleteAddress.publicKeys.masterNullifierPublicKey.hash())) { + return Promise.resolve({ + masterNullifierPublicKey: ownerCompleteAddress.publicKeys.masterNullifierPublicKey, + appNullifierSecretKey: computeAppNullifierSecretKey(ownerMasterNullifierSecretKey, contractAddress), + }); + } + if (masterNullifierPublicKeyHash.equals(recipientCompleteAddress.publicKeys.masterNullifierPublicKey.hash())) { + return Promise.resolve({ + masterNullifierPublicKey: recipientCompleteAddress.publicKeys.masterNullifierPublicKey, + appNullifierSecretKey: computeAppNullifierSecretKey(recipientMasterNullifierSecretKey, contractAddress), + }); + } + throw new Error(`Unknown master nullifier public key hash: ${masterNullifierPublicKeyHash}`); + }); // We call insertLeaves here with no leaves to populate empty public data tree root --> this is necessary to be // able to get ivpk_m during execution await insertLeaves([], 'publicData'); oracle.getHeader.mockResolvedValue(header); - oracle.getCompleteAddress.mockImplementation((accountOrNpkMHash: AztecAddress | Fr) => { - if (accountOrNpkMHash.equals(owner)) { + oracle.getCompleteAddress.mockImplementation((address: AztecAddress) => { + if (address.equals(owner)) { return Promise.resolve(ownerCompleteAddress); } - if (accountOrNpkMHash.equals(recipient)) { + if (address.equals(recipient)) { return Promise.resolve(recipientCompleteAddress); } - throw new Error(`Unknown address ${accountOrNpkMHash}`); + throw new Error(`Unknown address: ${address}`); }); // This oracle gets called when reading ivpk_m from
key registry --> we return zero witness indicating that // the keys were not registered. This triggers non-registered keys flow in which getCompleteAddress oracle @@ -249,8 +248,9 @@ const [unencryptedLog] = newUnencryptedLogs; expect(unencryptedLog.value).toEqual(Fr.fromBuffer(functionLogs.logs[0].hash())); + // We subtract 4 to avoid counting the extra 4 bytes used to store the length for L1 expect(result.callStackItem.publicInputs.unencryptedLogPreimagesLength).toEqual( - new Fr(functionLogs.getSerializedLength()), + new Fr(functionLogs.getSerializedLength() - 4), ); // Test that the log payload (ie ignoring address, selector, and header) matches what we emitted expect(functionLogs.logs[0].data.subarray(-32).toString('hex')).toEqual(owner.toBuffer().toString('hex')); @@ -268,8 +268,9 @@ const [unencryptedLog] = newUnencryptedLogs; expect(unencryptedLog.value).toEqual(Fr.fromBuffer(functionLogs.logs[0].hash())); + // We subtract 4 to avoid counting the extra 4 bytes used to store the length for L1 expect(result.callStackItem.publicInputs.unencryptedLogPreimagesLength).toEqual( - new Fr(functionLogs.getSerializedLength()), + new Fr(functionLogs.getSerializedLength() - 4), ); // Test that the log payload (ie ignoring address, selector, and header) matches what we emitted const expected = Buffer.concat(args[0].map(arg => arg.toBuffer())).toString('hex'); @@ -282,7 +283,7 @@ const mockFirstNullifier = new Fr(1111); let currentNoteIndex = 0n; - const buildNote = (amount: bigint, owner: AztecAddress, storageSlot: Fr, noteTypeId: Fr) => { + const buildNote = (amount: bigint, ownerNpkMHash: Fr, storageSlot: Fr, noteTypeId: Fr) => { // WARNING: this is not actually how nonces are computed! // For the purpose of this test we use a mocked firstNullifier and a random number // to compute the nonce.
Proper nonces are only enforced later by the kernel/later circuits @@ -293,7 +294,7 @@ describe('Private Execution test suite', () => { // `hash(firstNullifier, noteHashIndex)` const noteHashIndex = randomInt(1); // mock index in TX's final newNoteHashes array const nonce = computeNoteHashNonce(mockFirstNullifier, noteHashIndex); - const note = new Note([new Fr(amount), owner.toField(), Fr.random()]); + const note = new Note([new Fr(amount), ownerNpkMHash, Fr.random()]); const innerNoteHash = pedersenHash(note.items); return { contractAddress, @@ -341,13 +342,14 @@ describe('Private Execution test suite', () => { ), ); - const newEncryptedLogs = getNonEmptyItems(result.callStackItem.publicInputs.encryptedLogsHashes); + const newEncryptedLogs = getNonEmptyItems(result.callStackItem.publicInputs.noteEncryptedLogsHashes); expect(newEncryptedLogs).toHaveLength(1); const [encryptedLog] = newEncryptedLogs; - expect(encryptedLog.value).toEqual(Fr.fromBuffer(result.encryptedLogs[0].log.hash())); + expect(encryptedLog.noteHashCounter).toEqual(newNoteHashes[0].counter); + expect(encryptedLog.value).toEqual(Fr.fromBuffer(result.noteEncryptedLogs[0].log.hash())); expect(result.callStackItem.publicInputs.encryptedLogPreimagesLength).toEqual( - new Fr(getEncryptedSerializedLength(result)), + new Fr(getEncryptedNoteSerializedLength(result)), ); }); @@ -372,13 +374,14 @@ describe('Private Execution test suite', () => { ), ); - const newEncryptedLogs = getNonEmptyItems(result.callStackItem.publicInputs.encryptedLogsHashes); + const newEncryptedLogs = getNonEmptyItems(result.callStackItem.publicInputs.noteEncryptedLogsHashes); expect(newEncryptedLogs).toHaveLength(1); const [encryptedLog] = newEncryptedLogs; - expect(encryptedLog.value).toEqual(Fr.fromBuffer(result.encryptedLogs[0].log.hash())); + expect(encryptedLog.noteHashCounter).toEqual(newNoteHashes[0].counter); + expect(encryptedLog.value).toEqual(Fr.fromBuffer(result.noteEncryptedLogs[0].log.hash())); expect(result.callStackItem.publicInputs.encryptedLogPreimagesLength).toEqual( - new Fr(getEncryptedSerializedLength(result)), + new Fr(getEncryptedNoteSerializedLength(result)), ); }); @@ -394,7 +397,10 @@ describe('Private Execution test suite', () => { const noteTypeId = StatefulTestContractArtifact.notes['ValueNote'].id; - const notes = [buildNote(60n, owner, storageSlot, noteTypeId), buildNote(80n, owner, storageSlot, noteTypeId)]; + const notes = [ + buildNote(60n, ownerCompleteAddress.publicKeys.masterNullifierPublicKey.hash(), storageSlot, noteTypeId), + buildNote(80n, ownerCompleteAddress.publicKeys.masterNullifierPublicKey.hash(), storageSlot, noteTypeId), + ]; oracle.getNotes.mockResolvedValue(notes); const consumedNotes = await asyncMap(notes, ({ nonce, note }) => @@ -418,24 +424,26 @@ describe('Private Execution test suite', () => { const newNoteHashes = getNonEmptyItems(result.callStackItem.publicInputs.newNoteHashes); expect(newNoteHashes).toHaveLength(2); const [changeNoteHash, recipientNoteHash] = newNoteHashes; - expect(recipientNoteHash.value).toEqual( + const [recipientInnerNoteHash, changeInnerNoteHash] = [ await acirSimulator.computeInnerNoteHash(contractAddress, recipientStorageSlot, noteTypeId, recipientNote.note), - ); - expect(changeNoteHash.value).toEqual( await acirSimulator.computeInnerNoteHash(contractAddress, storageSlot, noteTypeId, changeNote.note), - ); + ]; + expect(recipientNoteHash.value).toEqual(recipientInnerNoteHash); + expect(changeNoteHash.value).toEqual(changeInnerNoteHash); 
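The counter assertions in these tests hinge on each noteEncryptedLogsHashes entry carrying a noteHashCounter equal to the counter of the note hash it was emitted for. A throwaway helper along these lines (illustrative only; the numeric counter types are an assumption) makes that pairing explicit:

// Pair each note log hash with the note hash whose counter it references.
function pairLogsToNoteHashes<
  L extends { noteHashCounter: number },
  N extends { counter: number },
>(logs: L[], noteHashes: N[]): Array<[L, N]> {
  return logs.map((log): [L, N] => {
    const match = noteHashes.find(n => n.counter === log.noteHashCounter);
    if (!match) {
      throw new Error(`No note hash with counter ${log.noteHashCounter}`);
    }
    return [log, match];
  });
}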
expect(recipientNote.note.items[0]).toEqual(new Fr(amountToTransfer)); expect(changeNote.note.items[0]).toEqual(new Fr(40n)); - const newEncryptedLogs = getNonEmptyItems(result.callStackItem.publicInputs.encryptedLogsHashes); + const newEncryptedLogs = getNonEmptyItems(result.callStackItem.publicInputs.noteEncryptedLogsHashes); expect(newEncryptedLogs).toHaveLength(2); const [encryptedChangeLog, encryptedRecipientLog] = newEncryptedLogs; - expect(encryptedChangeLog.value).toEqual(Fr.fromBuffer(result.encryptedLogs[0].log.hash())); - expect(encryptedRecipientLog.value).toEqual(Fr.fromBuffer(result.encryptedLogs[1].log.hash())); + expect(encryptedChangeLog.value).toEqual(Fr.fromBuffer(result.noteEncryptedLogs[0].log.hash())); + expect(encryptedChangeLog.noteHashCounter).toEqual(changeNoteHash.counter); + expect(encryptedRecipientLog.value).toEqual(Fr.fromBuffer(result.noteEncryptedLogs[1].log.hash())); + expect(encryptedRecipientLog.noteHashCounter).toEqual(recipientNoteHash.counter); expect(result.callStackItem.publicInputs.encryptedLogPreimagesLength).toEqual( - new Fr(getEncryptedSerializedLength(result)), + new Fr(getEncryptedNoteSerializedLength(result)), ); const readRequests = getNonEmptyItems(result.callStackItem.publicInputs.noteHashReadRequests).map(r => r.value); @@ -451,7 +459,9 @@ describe('Private Execution test suite', () => { const storageSlot = computeSlotForMapping(new Fr(1n), owner); const noteTypeId = StatefulTestContractArtifact.notes['ValueNote'].id; - const notes = [buildNote(balance, owner, storageSlot, noteTypeId)]; + const notes = [ + buildNote(balance, ownerCompleteAddress.publicKeys.masterNullifierPublicKey.hash(), storageSlot, noteTypeId), + ]; oracle.getNotes.mockResolvedValue(notes); const consumedNotes = await asyncMap(notes, ({ nonce, note }) => @@ -470,13 +480,15 @@ describe('Private Execution test suite', () => { expect(recipientNote.note.items[0]).toEqual(new Fr(amountToTransfer)); expect(changeNote.note.items[0]).toEqual(new Fr(balance - amountToTransfer)); - const newEncryptedLogs = getNonEmptyItems(result.callStackItem.publicInputs.encryptedLogsHashes); + const newEncryptedLogs = getNonEmptyItems(result.callStackItem.publicInputs.noteEncryptedLogsHashes); expect(newEncryptedLogs).toHaveLength(2); const [encryptedChangeLog, encryptedRecipientLog] = newEncryptedLogs; - expect(encryptedChangeLog.value).toEqual(Fr.fromBuffer(result.encryptedLogs[0].log.hash())); - expect(encryptedRecipientLog.value).toEqual(Fr.fromBuffer(result.encryptedLogs[1].log.hash())); + expect(encryptedChangeLog.value).toEqual(Fr.fromBuffer(result.noteEncryptedLogs[0].log.hash())); + expect(encryptedChangeLog.noteHashCounter).toEqual(result.callStackItem.publicInputs.newNoteHashes[0].counter); + expect(encryptedRecipientLog.value).toEqual(Fr.fromBuffer(result.noteEncryptedLogs[1].log.hash())); + expect(encryptedRecipientLog.noteHashCounter).toEqual(result.callStackItem.publicInputs.newNoteHashes[1].counter); expect(result.callStackItem.publicInputs.encryptedLogPreimagesLength).toEqual( - new Fr(getEncryptedSerializedLength(result)), + new Fr(getEncryptedNoteSerializedLength(result)), ); }); }); @@ -933,14 +945,14 @@ describe('Private Execution test suite', () => { ); expect(noteHash).toEqual(innerNoteHash); - const newEncryptedLogs = getNonEmptyItems(result.callStackItem.publicInputs.encryptedLogsHashes); + const newEncryptedLogs = getNonEmptyItems(result.callStackItem.publicInputs.noteEncryptedLogsHashes); expect(newEncryptedLogs).toHaveLength(1); const [encryptedLog] = 
newEncryptedLogs; - expect(encryptedLog.value).toEqual(Fr.fromBuffer(result.encryptedLogs[0].log.hash())); - expect(result.callStackItem.publicInputs.encryptedLogPreimagesLength).toEqual( - new Fr(getEncryptedSerializedLength(result)), - ); + expect(encryptedLog.noteHashCounter).toEqual(newNoteHashes[0].counter); + // We expect the note log to be chopped in ts. + // (note logs are chopped in kernel tail, so will still exist in the call stack item) + expect(result.noteEncryptedLogs).toHaveLength(0); // read request should match innerNoteHash for pending notes (there is no nonce, so can't compute "unique" hash) const readRequest = getNonEmptyItems(result.callStackItem.publicInputs.noteHashReadRequests)[0]; @@ -1012,14 +1024,14 @@ describe('Private Execution test suite', () => { ); expect(noteHash).toEqual(innerNoteHash); - const newEncryptedLogs = getNonEmptyItems(execInsert.callStackItem.publicInputs.encryptedLogsHashes); + const newEncryptedLogs = getNonEmptyItems(execInsert.callStackItem.publicInputs.noteEncryptedLogsHashes); expect(newEncryptedLogs).toHaveLength(1); const [encryptedLog] = newEncryptedLogs; - expect(encryptedLog.value).toEqual(Fr.fromBuffer(execInsert.encryptedLogs[0].log.hash())); - expect(result.callStackItem.publicInputs.encryptedLogPreimagesLength).toEqual( - new Fr(getEncryptedSerializedLength(result)), - ); + expect(encryptedLog.noteHashCounter).toEqual(newNoteHashes[0].counter); + // We expect the note log to be chopped in ts. + // (note logs are chopped in kernel tail, so will still exist in the call stack item) + expect(execInsert.noteEncryptedLogs).toHaveLength(0); // read request should match innerNoteHash for pending notes (there is no nonce, so can't compute "unique" hash) const readRequest = execGetThenNullify.callStackItem.publicInputs.noteHashReadRequests[0]; diff --git a/yarn-project/simulator/src/client/private_execution.ts b/yarn-project/simulator/src/client/private_execution.ts index abab2f2f46f..f774bf9208f 100644 --- a/yarn-project/simulator/src/client/private_execution.ts +++ b/yarn-project/simulator/src/client/private_execution.ts @@ -42,6 +42,8 @@ export async function executePrivateFunction( const returnWitness = witnessMapToFields(acirExecutionResult.returnWitness); const publicInputs = PrivateCircuitPublicInputs.fromFields(returnWitness); + context.chopNoteEncryptedLogs(); + const noteEncryptedLogs = context.getNoteEncryptedLogs(); const encryptedLogs = context.getEncryptedLogs(); const unencryptedLogs = context.getUnencryptedLogs(); @@ -69,6 +71,7 @@ export async function executePrivateFunction( vk: Buffer.from(artifact.verificationKey!, 'hex'), nestedExecutions, enqueuedPublicFunctionCalls, + noteEncryptedLogs, publicTeardownFunctionCall, encryptedLogs, unencryptedLogs, diff --git a/yarn-project/simulator/src/client/simulator.test.ts b/yarn-project/simulator/src/client/simulator.test.ts index 5eb1ecde8d4..f4cdbd7c97b 100644 --- a/yarn-project/simulator/src/client/simulator.test.ts +++ b/yarn-project/simulator/src/client/simulator.test.ts @@ -9,7 +9,7 @@ import { import { ABIParameterVisibility, type FunctionArtifact, getFunctionArtifact } from '@aztec/foundation/abi'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { poseidon2Hash } from '@aztec/foundation/crypto'; -import { Fr } from '@aztec/foundation/fields'; +import { Fr, type Point } from '@aztec/foundation/fields'; import { TokenContractArtifact } from '@aztec/noir-contracts.js/Token'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -22,7 +22,7 @@ 
describe('Simulator', () => { let node: MockProxy; let simulator: AcirSimulator; - let owner: AztecAddress; + let ownerMasterNullifierPublicKey: Point; let contractAddress: AztecAddress; let appNullifierSecretKey: Fr; @@ -30,14 +30,13 @@ describe('Simulator', () => { const ownerSk = Fr.fromString('2dcc5485a58316776299be08c78fa3788a1a7961ae30dc747fb1be17692a8d32'); const allOwnerKeys = deriveKeys(ownerSk); - const ownerMasterNullifierPublicKey = allOwnerKeys.publicKeys.masterNullifierPublicKey; + ownerMasterNullifierPublicKey = allOwnerKeys.publicKeys.masterNullifierPublicKey; const ownerMasterNullifierSecretKey = allOwnerKeys.masterNullifierSecretKey; contractAddress = AztecAddress.random(); const ownerPartialAddress = Fr.random(); const ownerCompleteAddress = CompleteAddress.fromSecretKeyAndPartialAddress(ownerSk, ownerPartialAddress); - owner = ownerCompleteAddress.address; appNullifierSecretKey = computeAppNullifierSecretKey(ownerMasterNullifierSecretKey, contractAddress); @@ -58,7 +57,7 @@ describe('Simulator', () => { const storageSlot = TokenContractArtifact.storageLayout['balances'].slot; const noteTypeId = TokenContractArtifact.notes['TokenNote'].id; - const createNote = (amount = 123n) => new Note([new Fr(amount), owner.toField(), Fr.random()]); + const createNote = (amount = 123n) => new Note([new Fr(amount), ownerMasterNullifierPublicKey.hash(), Fr.random()]); it('should compute note hashes and nullifier', async () => { oracle.getFunctionArtifactByName.mockResolvedValue(artifact); diff --git a/yarn-project/simulator/src/client/simulator.ts b/yarn-project/simulator/src/client/simulator.ts index 0eebf76e26a..1c3c8490f84 100644 --- a/yarn-project/simulator/src/client/simulator.ts +++ b/yarn-project/simulator/src/client/simulator.ts @@ -19,7 +19,6 @@ import { ClientExecutionContext } from './client_execution_context.js'; import { type DBOracle } from './db_oracle.js'; import { ExecutionNoteCache } from './execution_note_cache.js'; import { type ExecutionResult } from './execution_result.js'; -import { LogsCache } from './logs_cache.js'; import { executePrivateFunction } from './private_execution.js'; import { executeUnconstrainedFunction } from './unconstrained_execution.js'; import { ViewDataOracle } from './view_data_oracle.js'; @@ -101,7 +100,6 @@ export class AcirSimulator { request.authWitnesses, PackedValuesCache.create(request.argsOfCalls), new ExecutionNoteCache(), - new LogsCache(), this.db, this.node, startSideEffectCounter, diff --git a/yarn-project/simulator/src/client/unconstrained_execution.test.ts b/yarn-project/simulator/src/client/unconstrained_execution.test.ts index 3d1b69cb0ca..9077aa0489e 100644 --- a/yarn-project/simulator/src/client/unconstrained_execution.test.ts +++ b/yarn-project/simulator/src/client/unconstrained_execution.test.ts @@ -33,11 +33,11 @@ describe('Unconstrained Execution test suite', () => { const ownerCompleteAddress = CompleteAddress.fromSecretKeyAndPartialAddress(ownerSecretKey, Fr.random()); owner = ownerCompleteAddress.address; - oracle.getCompleteAddress.mockImplementation((accountOrNpkMHash: AztecAddress | Fr) => { - if (accountOrNpkMHash.equals(owner)) { + oracle.getCompleteAddress.mockImplementation((account: AztecAddress) => { + if (account.equals(owner)) { return Promise.resolve(ownerCompleteAddress); } - throw new Error(`Unknown address ${accountOrNpkMHash}`); + throw new Error(`Unknown address ${account}`); }); }); diff --git a/yarn-project/simulator/src/client/view_data_oracle.ts 
b/yarn-project/simulator/src/client/view_data_oracle.ts index bca1a2b0697..68479c6266b 100644 --- a/yarn-project/simulator/src/client/view_data_oracle.ts +++ b/yarn-project/simulator/src/client/view_data_oracle.ts @@ -35,13 +35,13 @@ export class ViewDataOracle extends TypedOracle { } /** - * Retrieve nullifier keys associated with a specific account or master nullifier public key and app address. - * @param accountOrNpkMHash - account address or master nullifier public key hash. + * Retrieve nullifier keys associated with a specific master nullifier public key and app address. + * @param npkMHash - The master nullifier public key hash. * @returns A Promise that resolves to nullifier keys. * @throws If the nullifier keys are not registered in the key store. */ - public override getNullifierKeys(accountOrNpkMHash: AztecAddress | Fr): Promise { - return this.db.getNullifierKeys(accountOrNpkMHash, this.contractAddress); + public override getNullifierKeys(npkMHash: Fr): Promise { + return this.db.getNullifierKeys(npkMHash, this.contractAddress); } /** @@ -127,13 +127,13 @@ export class ViewDataOracle extends TypedOracle { } /** - * Retrieve the complete address associated to a given address or master nullifier public key hash. - * @param accountOrNpkMHash - account address or master nullifier public key hash. - * @returns A complete address associated with the input address or master nullifier public key hash + * Retrieve the complete address associated to a given address. + * @param account - The account address. + * @returns A complete address associated with the input address. * @throws An error if the account is not registered in the database. */ - public override getCompleteAddress(accountOrNpkMHash: AztecAddress | Fr): Promise { - return this.db.getCompleteAddress(accountOrNpkMHash); + public override getCompleteAddress(account: AztecAddress): Promise { + return this.db.getCompleteAddress(account); } /** diff --git a/yarn-project/simulator/src/public/abstract_phase_manager.ts b/yarn-project/simulator/src/public/abstract_phase_manager.ts index b39468eb94a..3da3ffa306e 100644 --- a/yarn-project/simulator/src/public/abstract_phase_manager.ts +++ b/yarn-project/simulator/src/public/abstract_phase_manager.ts @@ -18,6 +18,7 @@ import { type Header, type KernelCircuitPublicInputs, L2ToL1Message, + LogHash, MAX_NEW_L2_TO_L1_MSGS_PER_CALL, MAX_NEW_NOTE_HASHES_PER_CALL, MAX_NEW_NULLIFIERS_PER_CALL, @@ -44,7 +45,6 @@ import { PublicKernelData, ReadRequest, RevertCode, - SideEffect, VK_TREE_HEIGHT, VerificationKey, makeEmptyProof, @@ -451,11 +451,7 @@ export abstract class AbstractPhaseManager { MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, ), publicCallStackHashes, - unencryptedLogsHashes: padArrayEnd( - result.unencryptedLogsHashes, - SideEffect.empty(), - MAX_UNENCRYPTED_LOGS_PER_CALL, - ), + unencryptedLogsHashes: padArrayEnd(result.unencryptedLogsHashes, LogHash.empty(), MAX_UNENCRYPTED_LOGS_PER_CALL), unencryptedLogPreimagesLength: result.unencryptedLogPreimagesLength, historicalHeader: this.historicalHeader, globalVariables: this.globalVariables, diff --git a/yarn-project/simulator/src/public/execution.ts b/yarn-project/simulator/src/public/execution.ts index 2aaccc9ffd4..7f5a1303568 100644 --- a/yarn-project/simulator/src/public/execution.ts +++ b/yarn-project/simulator/src/public/execution.ts @@ -4,13 +4,13 @@ import { type ContractStorageUpdateRequest, type Fr, type L2ToL1Message, + type LogHash, type NoteHash, type Nullifier, type PublicCallRequest, PublicDataRead, PublicDataUpdateRequest, 
type ReadRequest, - type SideEffect, } from '@aztec/circuits.js'; import { computePublicDataTreeLeafSlot, computePublicDataTreeValue } from '@aztec/circuits.js/hash'; @@ -48,7 +48,7 @@ export interface PublicExecutionResult { * The hashed logs with side effect counter. * Note: required as we don't track the counter anywhere else. */ - unencryptedLogsHashes: SideEffect[]; + unencryptedLogsHashes: LogHash[]; /** * Unencrypted logs emitted during execution of this function call. * Note: These are preimages to `unencryptedLogsHashes`. diff --git a/yarn-project/simulator/src/public/executor.ts b/yarn-project/simulator/src/public/executor.ts index 7404e15237a..9157508a9fb 100644 --- a/yarn-project/simulator/src/public/executor.ts +++ b/yarn-project/simulator/src/public/executor.ts @@ -11,6 +11,7 @@ import { import { createDebugLogger } from '@aztec/foundation/log'; import { spawn } from 'child_process'; +import { assert } from 'console'; import fs from 'fs/promises'; import path from 'path'; @@ -27,7 +28,12 @@ import { PackedValuesCache } from '../common/packed_values_cache.js'; import { type CommitmentsDB, type PublicContractsDB, type PublicStateDB } from './db.js'; import { type PublicExecution, type PublicExecutionResult, checkValidStaticCall } from './execution.js'; import { PublicExecutionContext } from './public_execution_context.js'; -import { convertAvmResultsToPxResult, createAvmExecutionEnvironment, isAvmBytecode } from './transitional_adaptors.js'; +import { + convertAvmResultsToPxResult, + createAvmExecutionEnvironment, + decompressBytecodeIfCompressed, + isAvmBytecode, +} from './transitional_adaptors.js'; /** * Execute a public function and return the execution result. @@ -46,7 +52,7 @@ export async function executePublicFunction( ); } - if (isAvmBytecode(bytecode)) { + if (await isAvmBytecode(bytecode)) { return await executeTopLevelPublicFunctionAvm(context, bytecode); } else { return await executePublicFunctionAcvm(context, bytecode, nested); @@ -185,7 +191,7 @@ async function executePublicFunctionAcvm( nestedExecutions: [], unencryptedLogsHashes: [], unencryptedLogs: UnencryptedFunctionL2Logs.empty(), - unencryptedLogPreimagesLength: new Fr(4n), // empty logs have len 4 + unencryptedLogPreimagesLength: Fr.ZERO, allUnencryptedLogs: UnencryptedFunctionL2Logs.empty(), reverted, revertReason, @@ -355,7 +361,10 @@ export class PublicExecutor { const proofPath = path.join(artifactsPath, 'proof'); const { args, functionData, contractAddress } = avmExecution; - const bytecode = await this.contractsDb.getBytecode(contractAddress, functionData.selector); + let bytecode = await this.contractsDb.getBytecode(contractAddress, functionData.selector); + assert(!!bytecode, `Bytecode not found for ${contractAddress}:${functionData.selector}`); + // This should be removed once we do bytecode validation. + bytecode = await decompressBytecodeIfCompressed(bytecode!); // Write call data and bytecode to files. 
await fs.writeFile( calldataPath, diff --git a/yarn-project/simulator/src/public/index.test.ts b/yarn-project/simulator/src/public/index.test.ts index 76131633890..9b17a5908bf 100644 --- a/yarn-project/simulator/src/public/index.test.ts +++ b/yarn-project/simulator/src/public/index.test.ts @@ -338,8 +338,9 @@ describe('ACIR public execution simulator', () => { expect(Fr.fromBuffer(childExecutionResult.unencryptedLogs.logs[0].hash())).toEqual( childExecutionResult.unencryptedLogsHashes[0].value, ); + // We subtract 4 to avoid counting the extra 4 bytes used to store the length for L1 expect(childExecutionResult.unencryptedLogPreimagesLength).toEqual( - new Fr(childExecutionResult.unencryptedLogs.getSerializedLength()), + new Fr(childExecutionResult.unencryptedLogs.getSerializedLength() - 4), ); expect(result.returnValues[0]).toEqual(new Fr(newValue)); }, 20_000); diff --git a/yarn-project/simulator/src/public/public_processor.test.ts b/yarn-project/simulator/src/public/public_processor.test.ts index 5495342516e..2e51cd93f3a 100644 --- a/yarn-project/simulator/src/public/public_processor.test.ts +++ b/yarn-project/simulator/src/public/public_processor.test.ts @@ -102,6 +102,7 @@ describe('public_processor', () => { hash, data: tx.data.toKernelCircuitPublicInputs(), proof: tx.proof, + noteEncryptedLogs: tx.noteEncryptedLogs, encryptedLogs: tx.encryptedLogs, unencryptedLogs: tx.unencryptedLogs, isEmpty: false, diff --git a/yarn-project/simulator/src/public/tail_phase_manager.ts b/yarn-project/simulator/src/public/tail_phase_manager.ts index 06b7f732b47..be324885e6f 100644 --- a/yarn-project/simulator/src/public/tail_phase_manager.ts +++ b/yarn-project/simulator/src/public/tail_phase_manager.ts @@ -1,9 +1,10 @@ import { type PublicKernelRequest, PublicKernelType, type Tx } from '@aztec/circuit-types'; import { - Fr, + type Fr, type GlobalVariables, type Header, type KernelCircuitPublicInputs, + type LogHash, MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, @@ -12,7 +13,6 @@ import { type Proof, type PublicKernelCircuitPublicInputs, PublicKernelTailCircuitPrivateInputs, - SideEffect, makeEmptyProof, mergeAccumulatedData, sortByCounter, @@ -159,10 +159,8 @@ export class TailPhaseManager extends AbstractPhaseManager { return sortByCounter(noteHashes).map(n => n.value) as Tuple; } - private sortLogsHashes(unencryptedLogsHashes: Tuple): Tuple { + private sortLogsHashes(unencryptedLogsHashes: Tuple): Tuple { // TODO(6052): logs here may have duplicate counters from nested calls - return sortByCounter( - unencryptedLogsHashes.map(n => ({ ...n, counter: n.counter.toNumber(), isEmpty: () => n.isEmpty() })), - ).map(h => new SideEffect(h.value, new Fr(h.counter))) as Tuple; + return sortByCounter(unencryptedLogsHashes); } } diff --git a/yarn-project/simulator/src/public/transitional_adaptors.ts b/yarn-project/simulator/src/public/transitional_adaptors.ts index 161b10091c8..74e6d004788 100644 --- a/yarn-project/simulator/src/public/transitional_adaptors.ts +++ b/yarn-project/simulator/src/public/transitional_adaptors.ts @@ -10,6 +10,9 @@ import { } from '@aztec/circuits.js'; import { Fr } from '@aztec/foundation/fields'; +import { promisify } from 'util'; +import { gunzip } from 'zlib'; + import { type AvmContext } from '../avm/avm_context.js'; import { AvmExecutionEnvironment } from '../avm/avm_execution_environment.js'; import { type AvmContractCallResults } from '../avm/avm_message_call_result.js'; @@ -111,7 +114,19 @@ export function markBytecodeAsAvm(bytecode:
Buffer): Buffer { return Buffer.concat([bytecode, AVM_MAGIC_SUFFIX]); } -export function isAvmBytecode(bytecode: Buffer): boolean { +// This is just a helper function for the AVM circuit. +export async function decompressBytecodeIfCompressed(bytecode: Buffer): Promise { + try { + return await promisify(gunzip)(bytecode); + } catch { + // gunzip throws if its input is not gzip-compressed. + // In that case we assume the bytecode was never compressed and return it unchanged. + return Promise.resolve(bytecode); + } +} + +export async function isAvmBytecode(bytecode: Buffer): Promise { + const decompressedBytecode = await decompressBytecodeIfCompressed(bytecode); const magicSize = AVM_MAGIC_SUFFIX.length; - return bytecode.subarray(-magicSize).equals(AVM_MAGIC_SUFFIX); + return decompressedBytecode.subarray(-magicSize).equals(AVM_MAGIC_SUFFIX); } diff --git a/yarn-project/simulator/src/public/utils.ts b/yarn-project/simulator/src/public/utils.ts index c48798ff350..f824cf9e92f 100644 --- a/yarn-project/simulator/src/public/utils.ts +++ b/yarn-project/simulator/src/public/utils.ts @@ -11,6 +11,7 @@ export function lastSideEffectCounter(tx: Tx): number { const sideEffectCounters = [ ...data.endNonRevertibleData.newNoteHashes, ...data.endNonRevertibleData.newNullifiers, + ...data.endNonRevertibleData.unencryptedLogsHashes, ...data.endNonRevertibleData.publicCallStack, ...data.end.newNoteHashes, ...data.end.newNullifiers, @@ -24,8 +25,7 @@ // look at both start and end counters because for enqueued public calls start > 0 while end === 0 max = Math.max(max, sideEffect.startSideEffectCounter.toNumber(), sideEffect.endSideEffectCounter.toNumber()); } else { - const counter = typeof sideEffect.counter === 'number' ? sideEffect.counter : sideEffect.counter.toNumber(); - max = Math.max(max, counter); + max = Math.max(max, sideEffect.counter); }
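For completeness, a usage sketch of the two helpers added in transitional_adaptors.ts, assuming they are imported alongside markBytecodeAsAvm; gzipSync comes from node's zlib and the byte values are placeholders:

import { gzipSync } from 'zlib';

async function demo() {
  const raw = markBytecodeAsAvm(Buffer.from([0x01, 0x02, 0x03]));
  const compressed = gzipSync(raw);

  // Plain and gzipped bytecode are both accepted: the compressed form is
  // inflated before the AVM magic suffix is checked.
  console.log(await isAvmBytecode(raw)); // true
  console.log(await isAvmBytecode(compressed)); // true

  // gunzip throws on non-gzip input, so uncompressed bytecode round-trips unchanged.
  const inflated = await decompressBytecodeIfCompressed(compressed);
  console.log(inflated.equals(raw)); // true
}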